diff --git a/.editorconfig b/.editorconfig new file mode 100755 index 00000000..7aa72618 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,11 @@ +# editorconfig.org +root = true + +[*] +indent_size = 2 +indent_style = space +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = off diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..3b3e9303 --- /dev/null +++ b/.env.example @@ -0,0 +1,2 @@ +# Production license for @nuxt/ui-pro, get one at https://ui.nuxt.com/pro/purchase +NUXT_UI_PRO_LICENSE= diff --git a/.firebaserc b/.firebaserc new file mode 100644 index 00000000..e983fad5 --- /dev/null +++ b/.firebaserc @@ -0,0 +1,7 @@ +{ + "projects": { + "default": "zksync-docs-staging-5eb09", + "prod": "zksync-docs" + }, + "etags": {} +} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..d150257f --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# This CODEOWNERS file sets the individuals responsible for code in the era-test-node repository. + +# These users are the default owners for everything in the repo. +# They will be requested for review when someone opens a pull request. +* @matter-labs/devxp + +# You can also specify code owners for specific directories or files. +# For example: +# /src/ @developer1 @developer2 +# /docs/ @documenter diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..994da041 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,47 @@ +name: Bug Report +description: File a bug report. +title: "[Bug]: " +labels: ["bug", "triage"] +assignees: + - matter-labs/devxp +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! Please fill out as much as you can, the more details the better we can help to resolve the issue. 
+ - type: textarea + id: what-happened + attributes: + label: What happened? + description: Also tell us, what did you expect to happen? + placeholder: Tell us what you see! + validations: + required: true + - type: textarea + id: expected + attributes: + label: What did you expect to happen? + placeholder: Tell us what you expected! + - type: textarea + id: environment + attributes: + label: Environment + description: Please share any relevant information about your environment. + placeholder: | + - Operating System: [e.g. macOS] + - Node version: [e.g., v18.17.0] + - Other relevant environment details: + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. + render: shell + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/matter-labs/zksync-docs/blob/main/CODE_OF_CONDUCT.md). + options: + - label: I agree to follow this project's Code of Conduct + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..60f25512 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: true +contact_links: + - name: zksync-developers Discussion + url: https://github.com/zkSync-Community-Hub/zkync-developers/discussions + about: These forms are for zkSync Docs related issues. If you have questions or need help, please visit the zksync-developers Discussion. diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000..be9801d3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,31 @@ +name: Feature Request +description: Is there a feature missing you would like to see? Let us know! 
+title: "[Feature]: " +labels: ["feature", "triage"] +assignees: + - matter-labs/devxp +body: + - type: markdown + attributes: + value: | + If you have a feature you'd like to see, please fill out this form. + - type: textarea + id: description + attributes: + label: Description + description: Please provide a brief description of the feature you would like to see. + placeholder: Tell us what you would like to see! + - type: textarea + id: rationale + attributes: + label: Rationale + description: Why do you think this feature would be beneficial to the project? + placeholder: Tell us why you think this feature would be beneficial! + - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/matter-labs/zksync-docs/blob/main/CODE_OF_CONDUCT.md). + options: + - label: I agree to follow this project's Code of Conduct + required: true diff --git a/.github/ISSUE_TEMPLATE/feedback.yml b/.github/ISSUE_TEMPLATE/feedback.yml new file mode 100644 index 00000000..fe149887 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feedback.yml @@ -0,0 +1,31 @@ +name: Feedback +description: Please share any feedback for us on our content! +title: "[Feedback]: " +labels: ["feedback", "triage"] +assignees: + - matter-labs/devxp +body: + - type: markdown + attributes: + value: | + If you have feedback on our content, please fill out this form. + - type: input + id: page + attributes: + label: Page + description: If this is related to a specific page, please provide the URL. + placeholder: https://docs.zksync.io/page + - type: textarea + id: description + attributes: + label: Description + description: Please provide a brief description of the feedback you would like to share. + placeholder: Tell us what you would like to share! 
+ - type: checkboxes + id: terms + attributes: + label: Code of Conduct + description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/matter-labs/zksync-docs/blob/main/CODE_OF_CONDUCT.md). + options: + - label: I agree to follow this project's Code of Conduct + required: true diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 00000000..d7df2fbd --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,26 @@ + + +# Description + + + +## Linked Issues + + + + +## Additional context diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml new file mode 100644 index 00000000..76f5a95d --- /dev/null +++ b/.github/workflows/checks.yaml @@ -0,0 +1,55 @@ +name: check 🕵️ + +on: + pull_request: + branches: [main, staging] + workflow_dispatch: + +env: + HUSKY: 0 + CI: true + +jobs: + format: + name: code_format + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v1 + - name: Install Dependencies + run: bun install + - name: Runs Prettier Formatter + run: bun run lint:prettier + + spelling: + name: spelling + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v1 + - name: Install Dependencies + run: bun install + - name: Runs cSpell Spell Checker + run: bun run lint:spelling + + markdown-lint: + name: markdown_lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v1 + - name: Install Dependencies + run: bun install + - name: Runs Markdown Linter + run: bun run lint:markdown + + eslint: + name: code_lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: oven-sh/setup-bun@v1 + - name: Install Dependencies + run: bun install + - name: Runs ESLint + run: bun run lint:eslint diff --git a/.github/workflows/deploy-preview.yml b/.github/workflows/deploy-preview.yml new file mode 100644 index 00000000..054e27cb --- 
/dev/null +++ b/.github/workflows/deploy-preview.yml @@ -0,0 +1,35 @@ +name: "preview builder 👀" +on: + pull_request: + workflow_dispatch: + +env: + NUXT_SITE_ENV: staging # used for NuxtSEO to disable things like indexing on staging + HUSKY: 0 + CI: true + +jobs: + build_and_preview: + env: + NUXT_UI_PRO_LICENSE: ${{ secrets.NUXT_UI_PRO_LICENSE }} + if: ${{ github.event_name == 'workflow_dispatch' || github.event.pull_request.head.repo.full_name == github.repository }} + runs-on: ubuntu-latest + outputs: + output_urls: "${{ steps.preview_deploy.outputs.urls }}" + steps: + - uses: actions/checkout@v4 # v4 + + - uses: oven-sh/setup-bun@v1 + + - name: "Install dependencies" + run: bun install --production --frozen-lockfile + + - name: "Deploy target: staging" + run: bun run build + + - name: "Deploy preview" + uses: matter-labs/action-hosting-deploy@main + with: + repoToken: '${{ secrets.GITHUB_TOKEN }}' + firebaseServiceAccount: "${{ secrets.FIREBASE_SERVICE_ACCOUNT_ZKSYNC_DOCS_STAGING_5EB09 }}" + projectId: zksync-docs-staging-5eb09 diff --git a/.github/workflows/deploy-prod.yml b/.github/workflows/deploy-prod.yml new file mode 100644 index 00000000..30e5b1b8 --- /dev/null +++ b/.github/workflows/deploy-prod.yml @@ -0,0 +1,38 @@ +name: "deploy 🚀" +on: + push: + branches: [main] + +env: + HUSKY: 0 + CI: true + +concurrency: + group: production + cancel-in-progress: true + +jobs: + build_and_deploy: + env: + NUXT_UI_PRO_LICENSE: ${{ secrets.NUXT_UI_PRO_LICENSE }} + if: ${{ github.event.repository.full_name == github.repository }} + runs-on: ubuntu-latest + environment: production + steps: + - uses: actions/checkout@v4 # v4 + with: + persist-credentials: false # <--- checking this in commit context + + - uses: oven-sh/setup-bun@v1 + + - name: "Install dependencies" + run: bun install --production --frozen-lockfile + - name: "Deploy target: staging" + run: bun run build + + - uses: matter-labs/action-hosting-deploy@main + with: + repoToken: "${{ secrets.GITHUB_TOKEN 
}}" + firebaseServiceAccount: "${{ secrets.FIREBASE_SERVICE_ACCOUNT_ZKSYNC_DOCS }}" + projectId: zksync-docs + channelId: live diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml new file mode 100644 index 00000000..61100243 --- /dev/null +++ b/.github/workflows/deploy-staging.yml @@ -0,0 +1,40 @@ +name: "deploy staging 🏗️" +on: + push: + branches: [staging] + workflow_dispatch: + +env: + NUXT_SITE_ENV: staging # used for NuxtSEO to disable things like indexing on staging + HUSKY: 0 + CI: true + +concurrency: + group: staging + cancel-in-progress: true + +jobs: + build_and_deploy: + env: + NUXT_UI_PRO_LICENSE: ${{ secrets.NUXT_UI_PRO_LICENSE }} + if: ${{ github.event.repository.full_name == github.repository }} + environment: staging + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 # v4 + with: + ref: "${{ github.event.inputs.ref }}" + + - uses: oven-sh/setup-bun@v1 + + - name: "Install dependencies" + run: bun install --production --frozen-lockfile + - name: "Deploy target: staging" + run: bun run build + + - uses: matter-labs/action-hosting-deploy@main + with: + repoToken: "${{ secrets.GITHUB_TOKEN }}" + firebaseServiceAccount: "${{ secrets.FIREBASE_SERVICE_ACCOUNT_ZKSYNC_DOCS_STAGING_5EB09 }}" + projectId: zksync-docs-staging-5eb09 + channelId: live diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..f159cf65 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,32 @@ +name: release 🚀 + +on: + push: + branches: [ main ] + +env: + HUSKY: 0 + CI: true + +jobs: + release: + if: ${{ github.event.repository.full_name == github.repository }} && {{ !contains(github.event.head_commit.message, "skip ci") }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 # v4 + with: + fetch-depth: 0 + persist-credentials: false + + - uses: actions/setup-node@v4 + with: + node-version: 'latest' + - uses: oven-sh/setup-bun@v1 + + - name: "Install 
dependencies" + run: bun install --production --frozen-lockfile + + - name: "Release" + run: bunx semantic-release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/secrets_scanner.yaml b/.github/workflows/secrets_scanner.yaml index f68823d1..4be8437d 100644 --- a/.github/workflows/secrets_scanner.yaml +++ b/.github/workflows/secrets_scanner.yaml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 with: fetch-depth: 0 - name: TruffleHog OSS diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..763090fe --- /dev/null +++ b/.gitignore @@ -0,0 +1,102 @@ +# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore + +# Logs + +logs +*.log + +# Diagnostic reports (https://nodejs.org/api/report.html) + +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# Runtime data + +pids +_.pid +_.seed +\*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover + +lib-cov + +# Coverage directory used by tools like istanbul + +coverage +\*.lcov + +# Dependency directories + +node_modules/ +jspm_packages/ + +# TypeScript cache + +\*.tsbuildinfo + +# Optional npm cache directory + +.npm + +# npm and pnpm +package-lock.json +__package_previews__ +.store + +privatePackages/store +pnpm-lock.yaml + +# Optional eslint cache + +.eslintcache + +# Optional stylelint cache + +.stylelintcache + +# dotenv environment variable files + +.env +.env.development.local +.env.test.local +.env.production.local +.env.local +.env.* +!.env.example + +# parcel-bundler cache (https://parceljs.org/) + +.cache +.parcel-cache + +# Nuxt.js build / generate output + +.nuxt +dist + +# yarn + +.yarn-integrity +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* +yarn.lock + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder 
config +.DS_Store + +.history + +# Nuxt dev/build outputs +.output +.data +.nuxt +.nitro +.cache +dist +.firebase \ No newline at end of file diff --git a/.husky/commit-msg b/.husky/commit-msg new file mode 100755 index 00000000..9e359542 --- /dev/null +++ b/.husky/commit-msg @@ -0,0 +1,3 @@ +#!/bin/sh +. "$(dirname "$0")/_/husky.sh" +bunx --no-install commitlint --edit "$1" diff --git a/.husky/install.mjs b/.husky/install.mjs new file mode 100644 index 00000000..5a33bee7 --- /dev/null +++ b/.husky/install.mjs @@ -0,0 +1,6 @@ +// Skip Husky install in production and CI +if (process.env.NODE_ENV === 'production' || process.env.CI === 'true') { + process.exit(0); +} +const husky = (await import('husky')).default; +console.log(husky()); diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100755 index 00000000..93f85662 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,3 @@ +#!/usr/bin/env sh +. "$(dirname "$0")/_/husky.sh" +bun lint-staged diff --git a/.lintstagedrc.yml b/.lintstagedrc.yml new file mode 100644 index 00000000..f6b2abef --- /dev/null +++ b/.lintstagedrc.yml @@ -0,0 +1,8 @@ +'*.{js,ts,vue}': + - prettier --list-different + - eslint +'*.md': + - markdownlint-cli2 + - cspell check +'*.{json,yml}': + - prettier --list-different diff --git a/.markdownlint.json b/.markdownlint.json new file mode 100644 index 00000000..a9e61989 --- /dev/null +++ b/.markdownlint.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://raw.githubusercontent.com/DavidAnson/markdownlint/main/schema/markdownlint-config-schema.json", + "default": true, + "MD001": false, + "MD007": { + "indent": 2 + }, + "MD013": { + "code_blocks": false, + "line_length": 150, + "tables": false + }, + "MD033": false, + "MD003": false, + "MD034": false, + "MD024": false, + "MD022": false, + "MD023": false, + "MD046": false +} diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..3f5a5547 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,5 @@ +node_modules +.github +.idea 
+public +**/*.md diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 00000000..2bebcd86 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,14 @@ +{ + "plugins": ["prettier-plugin-tailwindcss"], + "bracketSameLine": false, + "printWidth": 120, + "useTabs": false, + "tabWidth": 2, + "semi": true, + "trailingComma": "es5", + "singleQuote": true, + "bracketSpacing": true, + "vueIndentScriptAndStyle": false, + "proseWrap": "always", + "singleAttributePerLine": true +} diff --git a/.releaserc b/.releaserc new file mode 100644 index 00000000..57ce18f2 --- /dev/null +++ b/.releaserc @@ -0,0 +1,12 @@ +{ + "branches": "main", + "debug": true, + "tagFormat": "${version}", + "addReleases": "top", + "npmPublish": false, + "plugins": [ + "@semantic-release/commit-analyzer", + "@semantic-release/release-notes-generator", + "@semantic-release/github" + ] +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 00000000..12c3eb1e --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,19 @@ +{ + "recommendations": [ + "bradlc.vscode-tailwindcss", + "christian-kohler.npm-intellisense", + "christian-kohler.path-intellisense", + "DavidAnson.vscode-markdownlint", + "dbaeumer.vscode-eslint", + "editorconfig.editor", + "esbenp.prettier-vscode", + "mikestead.dotenv", + "nuxt.mdc", + "rvest.vs-code-prettier-eslint", + "simonsiefke.svg-preview", + "streetsidesoftware.code-spell-checker", + "vue.volar", + "yoavbls.pretty-ts-errors" + ], + "unwantedRecommendations": ["vue.vscode-typescript-vue-plugin"] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..7cbd388d --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,22 @@ +{ + "editor.defaultFormatter": "esbenp.prettier-vscode", + "editor.formatOnType": false, + "editor.formatOnPaste": false, + "editor.formatOnSave": true, + "editor.formatOnSaveMode": "file", + "editor.tabSize": 2, + "editor.insertSpaces": true, + "files.eol": "\n", + 
"[markdown]": { + "editor.wordWrap": "on", + "editor.defaultFormatter": "DavidAnson.vscode-markdownlint", + "editor.formatOnSave": true, + "editor.suggest.showWords": false, + "editor.quickSuggestions": { + "other": "on", + "comments": "off", + "strings": "off" + }, + "editor.tabCompletion": "onlySnippets" + } +} diff --git a/.vscode/snippets.code-snippets b/.vscode/snippets.code-snippets new file mode 100644 index 00000000..3467bf9d --- /dev/null +++ b/.vscode/snippets.code-snippets @@ -0,0 +1,46 @@ +{ + // Place your zksync-docs workspace snippets here. Each snippet is defined under a snippet name and has a scope, prefix, body and + // description. Add comma separated ids of the languages where the snippet is applicable in the scope field. If scope + // is left empty or omitted, the snippet gets applied to all languages. The prefix is what is + // used to trigger the snippet and the body will be expanded and inserted. Possible variables are: + // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders. + // Placeholders with the same ids are connected. 
+ // Example: + // "Print to console": { + // "scope": "javascript,typescript", + // "prefix": "log", + // "body": [ + // "console.log('$1');", + // "$2" + // ], + // "description": "Log output to console" + // } + "ZK Snippet": { + "scope": "markdown", + "prefix": "@zk", + "body": ["%%zk_$1%%$0"], + }, + "Callout": { + "scope": "markdown", + "prefix": "@callout", + "body": ["::callout{icon=\"$1\" color=\"$2\"}", "$0", "::"], + "description": "Callout component", + }, + "AlertCallout": { + "scope": "markdown", + "prefix": "@alert", + "body": ["::callout{icon=\"i-heroicons-exclamation-triangle\" color=\"amber\"}", "$0", "::"], + "description": "Callout component", + }, + "InfoCallout": { + "scope": "markdown", + "prefix": "@info", + "body": ["::callout{icon=\"i-heroicons-information-circle\" color=\"blue\"}", "$0", "::"], + "description": "Callout component", + }, + "CodeGroup": { + "scope": "markdown", + "prefix": "@codegroup", + "body": ["::code-group", "", "```bash [yarn]", "", "```", "", "```bash [npm]", "", "```", "", "::"], + }, +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..b0ab6ead --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,87 @@ +# Code of Conduct + +Version: 1.1 + +Apache 2.0 license, derived from the Apache Foundation Code of Conduct. +Also, CC BY-SA 3.0 derived from the Mozilla Community Participation Guidelines. + +Our goal is to cultivate a safe, friendly, and inclusive space that benefits all participants in the zkSync ecosystem. +This Code of Conduct outlines our shared values and expectations to help ensure that the community remains a positive and enriching environment for everyone. + +## When and how to use this Code of Conduct + +This is your guide for engaging as a participant in the zkSync ecosystem. +It applies to all physical and digital spaces related to zkSync. 
+ +## Expected behaviors + +**Be ethical**: +We endeavor to enrich the zkSync ecosystem, while not infringing on the rights and wellbeing of others. +We also endeavor to enrich ourselves without causing harm to the zkSync community. +We do not encourage tax evasion, promoting information leaks, speculating on tokens or token prices, or otherwise breaking the law. + +**Be kind and respectful**: +Treat everyone with kindness, empathy, and respect. +We all come from different backgrounds, perspectives and experiences, +so let's celebrate our differences and foster a culture of openness and understanding. +We may have strong feelings about other layer 1 and layer 2 blockchains, +but that is no reason to disparage, defame, or slander any competitor to zkSync or what other chains are doing. +Feel free to compare metrics and features, but keep to the facts and be respectful of all the builders in web3 +trying to advance freedom through blockchain technology! + +**Share and learn**: +Our community is a space for sharing knowledge, experiences, and ideas. +Positively contribute to discussions, offer helpful feedback, +be willing to educate others on your work and remain open to learning from others. + +**Give credit**: +When sharing content or ideas that aren't your own, ensure you give proper credit to the original creator. +Plagiarism and intellectual property infringement are strictly prohibited. + +**Respect privacy**: +Always seek consent before sharing personal information about yourself or others. +Respecting each other's privacy is vital to building trust within our community. + +**Be inquisitive and embrace continuous improvement**: +We strive to improve from each experience, and are open to constructive criticism. +We encourage questions, and redirect them to the appropriate channel if we do not have the answer. + +**Mind your language**: +Communication is key. +Use clear and considerate language in your interactions. 
+We aim to create a welcoming environment for users of all ages, so please avoid excessive profanity or explicit content. +Remember that zkSync community members are a diverse bunch. +English is our primary working language, but to help others where English is not their first language, +be succinct and avoid acronyms where possible. + +**Stay on topic**: +While we encourage friendly conversations, please ensure your discussions remain relevant to the community's purpose. +To keep our space focused and valuable, off-topic or irrelevant content may be redirected or removed. +Specific topics that are not appropriate include offering to buy or sell any cryptocurrency or engage in price speculation. + +**No hate speech or harassment**: +Let's maintain a constructive and uplifting atmosphere in all interactions. +We have a zero-tolerance policy for any form of hate speech, bullying, harassment, or discrimination. +This includes, but is not limited to: + +- Violent threats or language directed against another person. +- Sexist, racist, or otherwise discriminatory jokes and language. +- Posting sexually explicit or violent material. +- Posting (or threatening to post) other people's personally identifying information ("doxing"). +- Sharing private content without explicit consent, such as messages sent privately or non-publicly. +- Personal insults. +- Unwelcome sexual attention. +- Excessive or unnecessary profanity. +- Repeated harassment of others. In general, if someone asks you to stop, then stop. +- Advocating for, or encouraging, any of the above behavior. + +**Have fun and connect**: +Finally, remember that ZK Squad and the zkSync community is a place to connect, learn, and enjoy. +Participate in a manner that encourages positive interactions and enhances the experiences of all. + +## Managing violations + +If you come across inappropriate content or behavior, please report it without delay. +By working together, we can maintain a positive and safe environment. 
+ +If you are the subject of, or witness to, any violations of this Code of Conduct, please contact us at community@zksync.io. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..15944456 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,3 @@ +# Contributing + +Our primary contributing documentation is available on our site at [Contribution Guidelines](https://docs.zksync.io/build/contributing-to-documentation). diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 00000000..f49a4e16 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 00000000..195fb6cc --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Matter Labs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md index 14d82d9a..4fe944ec 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,105 @@ -# zksync-docs -Docs for zkSync +# 🌟 zkSync Developer Documentation + +[![License](https://img.shields.io/badge/license-MIT-blue)](LICENSE-MIT) +[![License: Apache 2.0](https://img.shields.io/badge/license-Apache%202.0-orange)](LICENSE-APACHE) +[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](https://www.contributor-covenant.org/) +[![Contributions Welcome](https://img.shields.io/badge/contributions-welcome-orange)](CONTRIBUTING.md) +[![X (formerly Twitter) Follow](https://badgen.net/badge/twitter/@zksyncDevs/1DA1F2?icon&label)](https://x.com/zksyncDevs) + +Welcome to the **zkSync Docs** repository! This is your go-to hub for all things +zkSync. Dive into our comprehensive documentation whether you're just starting out or looking for advanced guides. + +## 🚀 Quick Start + +1. **Install Bun:** Follow the [installation instructions](https://bun.sh/docs/installation). +2. **Install Dependencies:** + + ```sh + bun install + ``` + +3. **Run Locally:** Start the development server at `http://localhost:3000`. + + ```sh + bun run dev + ``` + +## 📚 Documentation Overview + +Unlock the full potential of zkSync with our comprehensive resources: + +- **🛠️ Build:** Learn how to develop and deploy your smart contracts and +applications on zkSync Era. Our step-by-step guides and tutorials will help you get started quickly and efficiently. + +- **🔗 ZK Stack:** Dive into the Zero-Knowledge (ZK) Stack to discover how to +configure and build a ZK chain tailored for your application. Explore the architecture, components, and best practices. + +- **🌐 zkSync Node:** Set up and run your own zkSync full node. Gain a deeper +understanding of node operations, observability, and maintenance to ensure optimal performance and reliability. + +- **🌍 Ecosystem:** Explore the vibrant zkSync ecosystem. 
Discover a wide array +of projects and tooling built for zkSync developers and users, from wallets and +explorers to integrations and developer tools. Stay updated with the latest innovations and community contributions. + +## 🛠️ Built With + +- [Vue](https://vuejs.org/) +- [Nuxt](https://nuxt.com/) +- [Nuxt Content](https://content.nuxt.com/) +- [Nuxt UI & Nuxt UI Pro](https://ui.nuxt.com/) +- [Tailwind](https://tailwindcss.com/) +- [Bun](https://bun.sh/) + +## 🖥️ Local Development + +Run the project locally: + +```sh +bun run dev +``` + +## Local Preview 👀 + +To locally preview the production build, first run `bun run build` to build the project, then run the following: + +```shell +bun run preview +``` + +## Lint & Formatting ✨ + +This project provides lint commands to check the project. + +### Run CI Checks ✔️ + +```shell +bun run ci:check +``` + +### Markdown Linting 📝 + +Markdown files are found in the `/content` directory. The following lint commands will run within that directory: + +```shell +bun run lint:spelling +bun run lint:markdown +``` + +### Linting 🧹 + +The following commands are available to run linting on the project: + +```shell +bun run lint:prettier +bun run lint:eslint +``` + +## 🤝 Contributions + +We welcome contributions from the community! Check out the following resources to get started: + +- [Contribution Overview](./content/00.build/90.contributing-to-documentation/10.index.md) +- [Contribution Guidelines](./content/00.build/90.contributing-to-documentation/20.contribution-guidelines.md) +- [Documentation Styleguide](./content/00.build/90.contributing-to-documentation/30.documentation-styleguide.md) + +Join us in making zkSync Docs better for everyone! 🌐✨ diff --git a/SUPPORT.md b/SUPPORT.md new file mode 100644 index 00000000..073c3ed3 --- /dev/null +++ b/SUPPORT.md @@ -0,0 +1,31 @@ +# Support + +Are you having trouble with our documentation or having some issues with a guide? 
+We have multiple channels available for you to request help. + +## Where to get help + +If you're in need of help with authoring your content for zkSync Docs, +first read through the [Contribution Guidelines](https://docs.zksync.io/build/contributing-to-documentation/contribution-guidelines) +to see if it may help answer some questions. +If the content there does not solve your problem, submit an [issue in GitHub](https://github.com/matter-labs/zksync-docs/issues) +and we can try to help resolve the issue. + +### Help with developer related issues +If you are referencing the documentation for a project as a developer and run into issues, +you can go to the [zksync-developers Discussions](https://github.com/zkSync-Community-Hub/zksync-developers/discussions) +where you can submit a question. We also have a [Discord community](https://join.zksync.dev/) +that may help with any troubleshooting. + +If you have questions related to any of our zkSync tools, +we recommend that you submit an Issue related to that project. +You can find the list of our open-source repositories in the [Contribution Track](https://docs.zksync.io/build/resources/contribution-track) +on our zkSync Docs site. + +## 🪲 Submit a bug report + +Have you come across a bug while going through our documentation? +Perhaps the versions are out of date or a particular tool is broken when trying to run a project. +Submit an [issue in GitHub](https://github.com/matter-labs/zksync-docs/issues) using our Bug Issue form +or submit feedback directly from the page! In the right sidebar on the site, there is a link below the Table of Contents +for "Share feedback" which will set up a pre-filled Issue form for the page you're on. diff --git a/app.config.ts b/app.config.ts new file mode 100644 index 00000000..e747ab7b --- /dev/null +++ b/app.config.ts @@ -0,0 +1,10 @@ +/** + * Nuxt App Configuration that allows for runtime editing. 
+ * This content can be accessible in components with the + * globally available `useAppConfig()`. + */ +export default defineAppConfig({ + seo: { + siteName: 'zkSync Docs', + }, +}); diff --git a/app.vue b/app.vue new file mode 100644 index 00000000..3900968e --- /dev/null +++ b/app.vue @@ -0,0 +1,70 @@ + + + diff --git a/bun.lockb b/bun.lockb new file mode 100755 index 00000000..0c049f86 Binary files /dev/null and b/bun.lockb differ diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..3f5e287f --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1 @@ +export default { extends: ['@commitlint/config-conventional'] }; diff --git a/components/layout/DocsBase.vue b/components/layout/DocsBase.vue new file mode 100644 index 00000000..046628b0 --- /dev/null +++ b/components/layout/DocsBase.vue @@ -0,0 +1,81 @@ + + + diff --git a/components/layout/Toc.vue b/components/layout/Toc.vue new file mode 100644 index 00000000..959586cb --- /dev/null +++ b/components/layout/Toc.vue @@ -0,0 +1,54 @@ + + + diff --git a/content/00.build/00.index.md b/content/00.build/00.index.md new file mode 100644 index 00000000..9cc97278 --- /dev/null +++ b/content/00.build/00.index.md @@ -0,0 +1,92 @@ +--- +title: Introduction +description: Welcome to the zkSync Era Docs. +--- + +::card-group + ::card + --- + title: Quickstart + icon: i-heroicons-rocket-launch-solid + to: /build/quick-start + --- + Experience a quick tutorial on building and deploying smart contracts on zkSync Era. + :: + ::card + --- + title: zkSync 101 + icon: i-heroicons-academic-cap-solid + to: /build/zksync-101 + --- + Learn to build projects locally for zkSync Era using zksync-cli. + :: +:: + +**zkSync Era** is a Layer 2 +**[ZK rollup](/build/resources/glossary#zk-rollup)**, a trustless protocol that +uses cryptographic validity proofs to provide scalable and low-cost transactions on Ethereum. +In zkSync Era, computation is performed off-chain and most data is stored off-chain as well. 
+Transactions are bundled into batches before generating a validity proof.
+As all validity proofs are proven on Ethereum, users enjoy the same security
+guarantees as in the L1.
+
+zkSync Era is made to look and feel like Ethereum, but with a higher throughput and lower fees.
+Just like on Ethereum, smart contracts are written in Solidity/Vyper and can be called using the same clients as in
+other EVM-compatible chains.
+
+You don't need to register a separate private key before using it;
+zkSync supports existing Ethereum wallets out of the box.
+
+## Main features
+:check-icon Security inherited from Ethereum, with zero reliance on 3rd parties.
+
+:check-icon Permissionless EVM-compatible smart contracts.
+
+:check-icon Preserving key EVM features, such as smart contract composability.
+
+:check-icon Standard Web3 API.
+
+:check-icon State updates via transaction outputs (also known as state diffs) which provides significant cost savings
+over transaction inputs.
+
+:check-icon Native account abstraction with improvements over EIP4337 (implemented in Ethereum and other rollups).
+
+You can find [more information about zkSync Era in L2BEAT](https://l2beat.com/scaling/projects/zksync-era#stage).
+
+## Developer experience
+
+zkSync Era was built to provide a similar developer experience as Ethereum.
+
+:check-icon Smart contracts can be written in Solidity or Vyper.
+
+:check-icon Smart contracts are compiled with custom compilers: **[zksolc and
+zkvyper](/zk-stack/components/compiler/toolchain)**.
+
+:check-icon Most contracts work out of the box so migrating projects is seamless.
+ +:check-icon Use existing frameworks +like [Hardhat](/build/tooling/hardhat/getting-started), libraries like +[Ethers](https://docs.ethers.org/v6/), [Viem](https://viem.sh/zksync), or +[web3.js](https://web3js.readthedocs.io/en/v1.5.2/index.html), and tools like [theGraph](https://thegraph.com/), +[Thirdweb](https://thirdweb.com/zksync), or +[Chainlink](https://docs.chain.link/data-feeds/price-feeds/addresses?network=zksync&page=1). + +:check-icon Web3 API compatibility enables support of most developer tools. + +:check-icon Different **[tools for testing and debugging +locally](/build/test-and-debug/)**. + +## User experience + +Interacting with applications built on zkSync Era is seamless, cheap and fast. + +- Transactions have instant confirmations and fast finality on L1. +- Transaction fees are extremely low ([average transaction costs](https://www.growthepie.xyz/fundamentals/transaction-costs)). +- Transaction fees can be conveniently paid with ERC20 tokens (e.g. USDC) thanks to + **[native account abstraction and paymasters](/build/developer-reference/account-abstraction)**. +- Support for existing Ethereum-based wallets like Metamask, TrustWallet, Zerion, Rabby, etc. + +::callout{icon="i-heroicons-cube-transparent-solid" color="blue"} +If you have not connected to zkSync Era before, you will need to +configure your wallet with the network. To add zkSync Era, see [Connect to zkSync Era](/build/connect-to-zksync). +:: diff --git a/content/00.build/01.connect-to-zksync.md b/content/00.build/01.connect-to-zksync.md new file mode 100644 index 00000000..2f350315 --- /dev/null +++ b/content/00.build/01.connect-to-zksync.md @@ -0,0 +1,34 @@ +--- +title: Connect to zkSync Era +description: Step-by-step guide to connect your wallet to zkSync Era mainnet and testnet. 
+--- + +## Add zkSync Era to your MetaMask wallet + +You can add zkSync Era to your MetaMask wallet using the buttons below: + +:network-adder{ network="mainnet" } :network-adder{ network="testnet" } + +If you are using a different in-browser wallet, the buttons above may also work for them. + +## Manually add zkSync Era + +To manually add zkSync Era as a custom network in your wallet, follow these steps: + +1. Find the “Add Network” option in your wallet (in MetaMask, you can find this in the networks dropdown). +1. Click on “Add Network" and "Add network manually". +1. Fill in the following details for the zkSync Era network environments: + +### Mainnet network details + +:display-partial{ path="_partials/_mainnet-network-details" } + +### Sepolia testnet network details + +:display-partial{ path="_partials/_testnet-network-details" } + +## Get testnet funds for your wallet + +Once you have your wallet connected to a zkSync Era environment, +you can get testnet funds from one of many [testnet faucets](/ecosystem/network-faucets) +to use with [deploying your first smart contract](/build/quick-start/deploy-your-first-contract). diff --git a/content/00.build/05.quick-start/1.index.md b/content/00.build/05.quick-start/1.index.md new file mode 100644 index 00000000..f49d3492 --- /dev/null +++ b/content/00.build/05.quick-start/1.index.md @@ -0,0 +1,22 @@ +--- +title: Overview +description: Learn how to interact with zkSync Era +--- + +Welcome to the Quickstart! + +This tutorial will help you to learn how to write, compile and deploy smart contracts to zkSync Era. +In the first step, you will build and deploy a simple contract and interact with it by sending messages. +The second section will have you creating your own ERC20 token and the final section will +introduce a specialized feature of zkSync Era, native account abstraction and paymasters. 
+
+This section is designed for developers new to zkSync Era and uses online IDEs - Remix and AtlasZK -
+to help you learn as quickly and efficiently as possible.
+
+## Get started
+
+- If you haven't already added zkSync Era to your wallet, follow the instructions in [Connect zkSync Era to your wallet](/build/connect-to-zksync).
+- Continue to [Deploy your first contract](/build/quick-start/deploy-your-first-contract) to learn how to use Remix or Atlas
+  to deploy a contract onto zkSync Era.
+- If you are familiar with zkSync Era and want to develop using `zksync-cli` locally
+  on your machine, you can jump over to the [zkSync 101](/build/zksync-101) section.
diff --git a/content/00.build/05.quick-start/3.deploy-your-first-contract.md b/content/00.build/05.quick-start/3.deploy-your-first-contract.md
new file mode 100644
index 00000000..d650016a
--- /dev/null
+++ b/content/00.build/05.quick-start/3.deploy-your-first-contract.md
@@ -0,0 +1,125 @@
+---
+title: Deploy your first contract
+description: Deploy a smart contract to zkSync from your browser using Remix or Atlas in under 5 minutes
+---
+
+This tutorial shows you how to deploy and interact with a smart contract on zkSync Era in less than 5 minutes.
+It will help you get familiar with the zkSync smart contract development and deployment process using different tools.
+
+In this section you will learn how to:
+
+:check-icon Build a smart contract to exchange messages with Zeek.
+
+:check-icon Deploy the smart contract to the %%zk_testnet_name%%.
+
+:check-icon Interact with the contract from your browser using Remix or Atlas.
+
+## Prerequisites
+
+1. Before you start, make sure that
+[you’ve configured the %%zk_testnet_name%% in your wallet](/build/connect-to-zksync).
+2. Have at least 0.5 %%zk_testnet_name%% ETH. If you need more, use [one of the faucets](/ecosystem/network-faucets).
+
+## Review the smart contract code
+
+The smart contract will store messages from users and emit events with replies from Zeek.
+The entire code is as follows:
+
+```solidity
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.0;
+
+contract ZeekMessages {
+    string[] private messages;
+
+    // Event to acknowledge a new message
+    event MessageReceived(string);
+
+    constructor() {
+        // Zeek initializes the contract with a welcome message
+        emit MessageReceived("Zeek welcomes you to zkSync!");
+    }
+
+    function sendMessage(string memory _message) public {
+        messages.push(_message);
+
+        // Acknowledge the message receipt with Zeek's reply
+        emit MessageReceived("ZK is the endgame - Message received!");
+    }
+
+    // Function to count the total messages sent to Zeek
+    function getTotalMessages() public view returns (uint) {
+        return messages.length;
+    }
+
+    // Function to return the last message sent to Zeek
+    function getLastMessage() public view returns (string memory) {
+        require(messages.length > 0, "No messages sent to Zeek yet!");
+        return messages[messages.length - 1];
+    }
+}
+
+```
+
+The Solidity smart contract contains three functions:
+
+- `sendMessage` stores the messages sent by users in the `messages` state variable.
+- `getTotalMessages` returns the number of messages stored in the smart contract.
+- `getLastMessage` returns the last message sent.
+
+::callout{icon="i-heroicons-light-bulb"}
+zkSync Era is [EVM compatible](/build/resources/glossary#evm-compatible).
+You can write smart contracts with Solidity or Vyper and use existing popular libraries like OpenZeppelin, just like on Ethereum.
+::
+
+## Compile and deploy the contract
+
+To compile and deploy the contract you can use either Atlas or Remix:
+
+::content-switcher
+---
+items: [{
+  label: 'Atlas',
+  partial: '_deploy_first/_atlas_deploy_contract'
+}, {
+  label: 'Remix',
+  partial: '_deploy_first/_remix_deploy_contract'
+}]
+---
+::
+
+## Check the contract in explorer
+
+Copy the smart contract address from Atlas/Remix and search it via the [%%zk_testnet_name%%
+explorer](%%zk_testnet_block_explorer_url%%). You’ll see the contract has a transaction from the message you just sent.
+
+![Contract in zkSync explorer](/images/101-quickstart/101-contract-deployed.png)
+
+The status will be “Processed” on zkSync and “Sending” on Ethereum. [Learn more about the transaction lifecycle on zkSync](/zk-stack/concepts/transaction-lifecycle).
+
+In the “Contract” tab you’ll see the contract source code as Atlas and Remix automatically verified the contract for us.
+When a smart contract is verified in a block explorer, it means that the source code of the contract has been published
+and matched to the compiled version on the blockchain, enhancing transparency, as users can review the contract’s source
+code to understand its functions and intentions.
+
+Finally in the “Events” tab, you’ll see the replies from Zeek as these are emitted as events in our smart contract.
+
+![Contract events in zkSync explorer](/images/101-quickstart/101-contract-events.png)
+
+ZK is the endgame ✌️
+
+## Takeaways
+
+- **EVM-compatibility**: zkSync Era is EVM-compatible and you can write smart contracts in Solidity or Vyper as in
+  Ethereum.
+- **Custom compilers**: smart contracts deployed to zkSync Era must be compiled with the custom compilers: `zksolc` for
+  Solidity and `zkvyper` for Vyper.
+- **Browser-based IDEs**: Existing tools like Atlas and Remix use zkSync custom compilers under the hood.
+
+## Next steps
+
+- Continue learning by [deploying an ERC20 token to zkSync](/build/quick-start/erc20-token).
+- Join the [zkSync developer community in Discord](https://join.zksync.dev/) where you can ask any questions about this tutorial in the #dev-101 + channel +- Join our [GitHub Discussions Community](%%zk_git_repo_zksync-developers%%/discussions/) to + help other devs building on zkSync or share your project. diff --git a/content/00.build/05.quick-start/4.erc20-token.md b/content/00.build/05.quick-start/4.erc20-token.md new file mode 100644 index 00000000..a446fe9c --- /dev/null +++ b/content/00.build/05.quick-start/4.erc20-token.md @@ -0,0 +1,55 @@ +--- +title: Create an ERC20 token +description: In this tutorial you'll build and deploy an ERC20 token to %%zk_testnet_name%% +--- + +This tutorial shows you how to deploy and interact with an ERC20 token on %%zk_testnet_name%%. + +This is what you're going to do: + +:check-icon Build an ERC20 token smart contract with additional custom logic + +:check-icon Deploy the smart contract to the %%zk_testnet_name%% using Remix or Atlas. + +## Prerequisites + +1. Before you start, make sure that +[you’ve configured the %%zk_testnet_name%% in your wallet](/build/connect-to-zksync). +2. Have at least 0.5 %%zk_testnet_name%% ETH. If you need more, use [one of the faucets](/ecosystem/network-faucets). + +To complete this tutorial you'll use either Atlas or Remix. Select your preferred tool: + +::content-switcher +--- +items: [{ + label: 'Atlas', + partial: '_erc20_tutorial/_atlas_erc20_tutorial' +}, { + label: 'Remix', + partial: '_erc20_tutorial/_remix_erc20_tutorial' +}] +--- +:: + +## Takeaways + +- **zkSync is EVM compatible** and supports existing smart contract libraries like OpenZeppelin +- **Use popular libraries like** `ethers` , `viem`, or `web3.js` to interact with smart contracts deployed on zkSync. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The ERC20 token code is provided “as is” without any express or implied warranties. 
+ +- The regulatory regime governing digital assets is still developing and is unclear in many jurisdictions. + +- ERC20 tokens may possess unique legal, tax, and market risks, so it is up to you to determine which, if any, laws apply to your deployment of ERC20 tokens. + +- The developers and publishers of this software disclaim any liability for any legal issues that may arise from its use. +:: + +## Next steps + +- Join the [zkSync developer community in Discord](https://join.zksync.dev/) where you can ask any questions about this tutorial in the #dev-101 + channel +- Continue learning about [paymasters and paying fees with this ERC20 token](/build/quick-start/paymasters-introduction). +- Join our [GitHub Discussions Community](%%zk_git_repo_zksync-developers%%/discussions/) to + help other devs building on zkSync or share your project. diff --git a/content/00.build/05.quick-start/5.paymasters-introduction.md b/content/00.build/05.quick-start/5.paymasters-introduction.md new file mode 100644 index 00000000..87e8802a --- /dev/null +++ b/content/00.build/05.quick-start/5.paymasters-introduction.md @@ -0,0 +1,196 @@ +--- +title: Paymasters introduction +description: Learn about paymasters and use one to pay transaction fees with your own token +--- + +This tutorial makes use of smart contracts deployed in the previous two tutorials, +[Deploy your first contract](/build/quick-start/deploy-your-first-contract) and [Create an ERC20 token](/build/quick-start/erc20-token). +This section introduces one of the custom features +of zkSync: native account abstraction and paymasters. + +In this tutorial we will: + +:check-icon Learn about paymasters. + +:check-icon Review the testnet paymaster smart contract code. + +:check-icon Use the testnet paymaster to pay transaction fees with our own ERC20 token. + +## Prerequisites + +1. 
Before you start, make sure that +[you’ve configured the %%zk_testnet_name%% in your browser wallet by following the instructions here](/build/connect-to-zksync). +1. In addition, fund your wallet with %%zk_testnet_name%% ETH using [one of the available faucets](/ecosystem/network-faucets). + +## What is a Paymaster? + +Paymasters in the zkSync ecosystem represent a groundbreaking approach to handling transaction fees. +They are special accounts designed to subsidize transaction costs for other accounts, +potentially making certain transactions free for end-users. +This feature is particularly useful for dApp developers looking +to improve their platform's accessibility and user experience by covering transaction fees on behalf of their users. + +Every paymaster has the following two functions: + +- `validateAndPayForPaymasterTransaction` : this function uses the transaction parameters (fields like `from`, `amount` , `to` + ) to execute the required validations and pay for the transaction fee. + +- `postTransaction`: this optional function runs after the transaction is executed. + +![zksync paymaster](/images/101-paymasters/zksync-paymaster.png) + +## Paymaster smart contract code + +Although application developers are encouraged to create their own paymaster smart contract, zkSync provides a testnet +paymaster for convenience and testing purposes. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The paymaster smart contract code is provided "as-is" without any express or implied warranties. +
+ +- Users are solely responsible for ensuring that their design, implementation, + and use of the paymaster smart contract software complies with all applicable laws, + including but not limited to money transmission, anti-money laundering (AML), and payment processing regulations. + +- The developers and publishers of this software disclaim any liability for any legal issues that may arise from its use. +:: + +The testnet paymaster address is +[0x3cb2b87d10ac01736a65688f3e0fb1b070b3eea3](https://sepolia.explorer.zksync.io/address/0x3cb2b87d10ac01736a65688f3e0fb1b070b3eea3) + +::drop-panel + ::panel{label="TestnetPaymaster.sol"} + ```solidity + // SPDX-License-Identifier: MIT + + pragma solidity 0.8.20; + + import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + + import "./interfaces/IPaymaster.sol"; + import "./interfaces/IPaymasterFlow.sol"; + import "./L2ContractHelper.sol"; + + // This is a dummy paymaster. It expects the paymasterInput to contain its "signature" as well as the needed exchange rate. + // It supports only approval-based paymaster flow. + contract TestnetPaymaster is IPaymaster { + function validateAndPayForPaymasterTransaction( + bytes32, + bytes32, + Transaction calldata _transaction + ) external payable returns (bytes4 magic, bytes memory context) { + // By default we consider the transaction as accepted. 
+ magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; + + require(msg.sender == BOOTLOADER_ADDRESS, "Only bootloader can call this contract"); + require(_transaction.paymasterInput.length >= 4, "The standard paymaster input must be at least 4 bytes long"); + + bytes4 paymasterInputSelector = bytes4(_transaction.paymasterInput[0:4]); + if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { + // While the actual data consists of address, uint256 and bytes data, + // the data is not needed for the testnet paymaster + (address token, uint256 amount, ) = abi.decode(_transaction.paymasterInput[4:], (address, uint256, bytes)); + + // Firstly, we verify that the user has provided enough allowance + address userAddress = address(uint160(_transaction.from)); + address thisAddress = address(this); + + uint256 providedAllowance = IERC20(token).allowance(userAddress, thisAddress); + require(providedAllowance >= amount, "The user did not provide enough allowance"); + + // The testnet paymaster exchanges X wei of the token to the X wei of ETH. + uint256 requiredETH = _transaction.gasLimit * _transaction.maxFeePerGas; + if (amount < requiredETH) { + // Important note: while this clause definitely means that the user + // has underpaid the paymaster and the transaction should not accepted, + // we do not want the transaction to revert, because for fee estimation + // we allow users to provide smaller amount of funds then necessary to preserve + // the property that if using X gas the transaction success, then it will succeed with X+1 gas. 
+ magic = bytes4(0); + } + + // Pulling all the tokens from the user + try IERC20(token).transferFrom(userAddress, thisAddress, amount) {} catch (bytes memory revertReason) { + // If the revert reason is empty or represented by just a function selector, + // we replace the error with a more user-friendly message + if (revertReason.length <= 4) { + revert("Failed to transferFrom from users' account"); + } else { + assembly { + revert(add(0x20, revertReason), mload(revertReason)) + } + } + } + + // The bootloader never returns any data, so it can safely be ignored here. + (bool success, ) = payable(BOOTLOADER_ADDRESS).call{value: requiredETH}(""); + require(success, "Failed to transfer funds to the bootloader"); + } else { + revert("Unsupported paymaster flow"); + } + } + + function postTransaction( + bytes calldata _context, + Transaction calldata _transaction, + bytes32, + bytes32, + ExecutionResult _txResult, + uint256 _maxRefundedGas + ) external payable override { + // Nothing to do + } + + receive() external payable {} + } + ``` + :: +:: + +In the `validateAndPayForPaymasterTransaction` it is: + +1. Checking that the paymasterInput is `approvalBased`. +2. Checking that the allowance of a given ERC20 is enough. +3. Transferring the transaction fee (`requiredETH`) in ERC20 from the user’s balance to the paymaster. +4. Transferring the transaction fee in ETH from the paymaster contract to the bootloader. + +## How to send a transaction through a paymaster? + +In order to send a transaction through a paymaster, the transaction must include the following additional parameters: + +- `paymasterAddress`: the smart contract address of the paymaster +- `type`: should be `General` or `ApprovalBased` (to pay fees with ERC20 tokens) +- `minimalAllowance`: the amount of ERC20 tokens to be approved for spending (for `approvalBased` type paymasters only). +- `innerInput`: any payload we want to send to the paymaster (optional). + +We’ll see an example next. 
+ +## Interacting with the testnet paymaster + +We’re going to interact with the `ZeekSecretMessages.sol` contract that we created in the first tutorial and use the +ERC20 token that we deployed in the second tutorial to pay the transaction fees. + +::content-switcher +--- +items: [{ + label: 'Atlas', + partial: '_paymaster_intro/_atlas_paymaster_intro' +}, { + label: 'Remix', + partial: '_paymaster_intro/_remix_paymaster_intro' +}] +--- +:: + +## Takeaways + +- Paymasters on zkSync allow any account to pay fees with ERC20 tokens or enable gasless transactions. + +- Paymasters are smart contracts that can have any validations and rules. +- To send a transaction through a paymaster, we only need to include additional parameters in the transaction. + +## Next steps + +- Learn more about paymasters and native account abstraction in this section of the docs. +- Browse different paymaster examples in [this open source repository](https://github.com/matter-labs/paymaster-examples). +- Continue learning in zkSync 101 by building a [GoFundMe clone](/build/zksync-101). diff --git a/content/00.build/05.quick-start/_deploy_first/_atlas_deploy_contract.md b/content/00.build/05.quick-start/_deploy_first/_atlas_deploy_contract.md new file mode 100644 index 00000000..98efd4de --- /dev/null +++ b/content/00.build/05.quick-start/_deploy_first/_atlas_deploy_contract.md @@ -0,0 +1,37 @@ +--- +title: Quickstart with Atlas +--- +Atlas is a browser-based IDE with an integrated AI assistant that allows you to write, test and deploy smart contracts +directly from your browser. Click the button below to open the project in Atlas. 
+ +:u-button{ icon="i-heroicons-code-bracket" size="lg" color="primary" variant="solid" :trailing="false" +to="https://app.atlaszk.com/projects?template=https://github.com/zkSync-Community-Hub/zksync-quickstart-atlas&open=/contracts/ZeekSecretMessages.sol&chainId=%%zk_testnet_chain_id%%" +target="_blank" label="Open smart contract in Atlas"} + +### Compile and deploy the contract + +Enter a name for the project and you will see the contract in the Atlas code editor. +On the right side, make sure the selected network is “%%zk_testnet_name%%“ +and click on **“Deploy”** to trigger the smart contract compilation and deployment. + +![Contract in Atlas](/images/101-quickstart/101-atlas-contract.png) + +::callout{icon="i-heroicons-light-bulb"} +Behind the scenes, Atlas is using the zkSync Era custom solidity compiler +(named `zksolc` ) to generate ZKEVM compatible bytecode. [Learn more about zkSync custom compilers](/zk-stack/components/compiler/toolchain). +:: + +Once compiled sign the transaction with your wallet and wait until it's processed. You’ll see the contract in the +“Deployed contracts” section. Congratulations, you’ve deployed your first smart contract to %%zk_testnet_name%%! + +### Interact with the contract + +Below the contract name you can find its deployment address. The “Live Contract State” section displays the smart +contract balance and the value returned by the `getTotalMessages` function. + +![Contract deployed](/images/101-quickstart/101-atlas-deployed.png) + +The “Write Functions” section contains the form to interact with the `sendMessage` function. Write a message, click the +“Run” button and confirm the transaction in your wallet. You’ll see that the `getTotalMessages` is updated to `1` and +`getLastMessage` returns the message you just sent. That means our contract is storing the messages as expected! But how +can you see the replies from Zeek? 
diff --git a/content/00.build/05.quick-start/_deploy_first/_remix_deploy_contract.md b/content/00.build/05.quick-start/_deploy_first/_remix_deploy_contract.md
new file mode 100644
index 00000000..71dceb79
--- /dev/null
+++ b/content/00.build/05.quick-start/_deploy_first/_remix_deploy_contract.md
@@ -0,0 +1,53 @@
+---
+title: Quickstart with Remix
+---
+
+The Remix IDE is an open-source web and desktop application that supports Ethereum smart contract development and
+deployment, offering tools for writing, testing, debugging, and deploying smart contracts written in Solidity to EVM
+compatible protocols.
+
+### Enable the Remix zkSync plugin
+
+:display-partial{path="/_partials/_enable-remix-zksync-plugin"}
+
+Click the button below to open the project in Remix and see the contract in the Remix code editor.
+
+:u-button{ icon="i-heroicons-code-bracket" size="lg" color="primary" variant="solid" :trailing="false"
+to="https://remix.ethereum.org/#url=https://github.com/zkSync-Community-Hub/zksync-quickstart-remix/blob/master/contracts/ZeekSecretMessages.sol"
+target="_blank" label="Open smart contract in Remix"}
+
+### Connect your wallet
+
+Make sure your wallet is currently connected to the %%zk_testnet_name%% as we will use our wallet’s configured
+network to deploy our smart contract. In Remix, under the Environment Section, select “Wallet” and click on
+“Connect Wallet”.
+
+### Compile the contract
+
+To compile the contract, click on "Compile ZeekSecretMessages.sol". If you get a popup message requesting permissions to
+access **`ACCESS TO "WRITEFILE" OF "FILE MANAGER"`,** click on "Accept".
+
+::callout{icon="i-heroicons-light-bulb"}
+Behind the scenes, Remix is using the zkSync Era custom solidity compiler
+(named `zksolc`) to generate zkSync VM compatible bytecode. [Learn more about zkSync custom compilers](/zk-stack/components/compiler/toolchain).
+:: + +### Deploy the contract + +To deploy the contract, open the "Deploy" dropdown, check the "Verify contract" checkbox, and click on “Deploy & +Verify”. Sign the transaction in your wallet and wait a few seconds until it's processed. Congratulations, you’ve +deployed your first contract to %%zk_testnet_name%%! + +![Remix interact zkSync contract](/images/101-quickstart/101-remix-deploy.png) + +### Interact with the contract + +Next to the contract name you can find the address where the contract is deployed. The “Interact” section displays the +forms to interact with the `getTotalMessages` and `sendMessage` functions. + +![Remix interact zkSync contract](/images/101-quickstart/101-remix-interact.png) + +Write a message in the form, click the “sendMessage” button and confirm the transaction in your wallet. Once processed, +click the `getTotalMessages` and check the response in the terminal, which should be `1`. The `getLastMessage` function +should also return the message you just sent. That means the contract is storing the messages as expected! But how can +we see the replies from Zeek? diff --git a/content/00.build/05.quick-start/_dir.yml b/content/00.build/05.quick-start/_dir.yml new file mode 100644 index 00000000..a196aaa0 --- /dev/null +++ b/content/00.build/05.quick-start/_dir.yml @@ -0,0 +1 @@ +title: Quickstart diff --git a/content/00.build/05.quick-start/_erc20_tutorial/_atlas_erc20_tutorial.md b/content/00.build/05.quick-start/_erc20_tutorial/_atlas_erc20_tutorial.md new file mode 100644 index 00000000..8fe1ec0e --- /dev/null +++ b/content/00.build/05.quick-start/_erc20_tutorial/_atlas_erc20_tutorial.md @@ -0,0 +1,134 @@ +--- +title: ERC20 token with Atlas +--- +## Custom ERC20 token code + +ERC20 tokens are a standard for fungible tokens, which can be traded and represent a fixed value. You’ve used ERC20 +tokens if you’ve transacted with USDC, DAI, USDT, LINK or UNI. + +The ERC20 token we’re going to deploy will allow users to mint and burn tokens. 
The entire smart contract code is as +follows: + +```solidity +// SPDX-License-Identifier: Unlicensed +pragma solidity ^0.8.19; + +import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; +import "@openzeppelin/contracts/access/Ownable.sol"; +import "@openzeppelin/contracts/token/ERC20/extensions/ERC20Burnable.sol"; + +contract TestToken is ERC20, Ownable, ERC20Burnable { + constructor(string memory name, string memory symbol) ERC20(name, symbol) { + _mint(msg.sender, 100 * 10 ** decimals()); + } + + function mint(address to, uint256 amount) public onlyOwner { + _mint(to, amount); + } +} +``` + +::callout{icon="i-heroicons-light-bulb"} +zkSync is [EVM compatible](/build/resources/glossary#evm-compatible), so you can use existing popular libraries like OpenZeppelin. +:: + +The most important features are: + +- `Ownable` : this extension sets the deployer account as owner of the smart contract. It also introduces the + `onlyOwner` modifier that restricts the execution of certain functions to the owner of the contract. +- `ERC20Burnable`: this extension adds the `burn` and `burnFrom` functions to the smart contract. These functions + destroy tokens from a given account. +- `constructor`: called on deployment, the constructor will assign the given name and symbol to the token and mint 100 + units of it to the account that deployed the contract. +- `mint` : this function creates new token units to a given account. It uses the `onlyOwner` modifier so it can only be + called from the owner account. + +## Deploy the smart contract + +Atlas is a browser-based IDE with an integrated AI assistant that allows you to write, test and deploy smart contracts +directly from your browser. Click the button below to open the project in Atlas. 
+ +:u-button{ icon="i-heroicons-code-bracket" size="lg" color="primary" variant="solid" :trailing="false" +to="https://app.atlaszk.com/projects?template=https://github.com/zkSync-Community-Hub/zksync-quickstart-atlas&open=/contracts/TestToken.sol&chainId=%%zk_testnet_chain_id%%" +target="_blank" label="Open smart contract in Atlas"} + +You can see the contract in the Atlas code editor. In the right sidebar, +make sure the selected network is “%%zk_testnet_name%%“ +and click on **"Deploy"** to trigger the smart contract compilation and deployment. + +::callout{icon="i-heroicons-light-bulb"} +Behind the scenes, Atlas is using the zkSync Era custom solidity compiler +(named `zksolc` ) to generate ZKEVM compatible bytecode. [Learn more about zkSync custom compilers](/zk-stack/components/compiler/toolchain). +:: + +![ERC20 interact script in Atlas](/images/101-erc20/atlas-deploy-erc20.png) + +Once compiled sign the transaction with your wallet and wait until its processed. You’ll see the contract in the +**“Deployed contracts”** section. + +## Interact with the ERC20 contract + +In the `scripts` folder you can find the `mint-token.ts` script containing the following code: + +```ts +import { AtlasEnvironment } from "atlas-ide"; +import TokenArtifact from "../artifacts/TestToken"; +import * as ethers from "ethers"; + +// Address of the ERC20 token contract +const TOKEN_CONTRACT_ADDRESS = "" +// Wallet that will receive tokens +const RECEIVER_WALLET = ""; +// Amount of tokens to mint in ETH format, e.g. 
1.23
+const TOKEN_AMOUNT = "";
+
+export async function main (atlas: AtlasEnvironment) {
+ const provider = new ethers.providers.Web3Provider(atlas.provider);
+ const wallet = provider.getSigner();
+
+ // initialise token contract with address, abi and signer
+ const tokenContract= new ethers.Contract(
+ TOKEN_CONTRACT_ADDRESS,
+ TokenArtifact.TestToken.abi,
+ wallet
+ );
+
+ console.log("Minting tokens...");
+ const tx = await tokenContract.mint(
+ RECEIVER_WALLET,
+ ethers.utils.parseEther(TOKEN_AMOUNT)
+ );
+ await tx.wait();
+
+
+ console.log("Success!");
+ console.log(`
+ The account ${RECEIVER_WALLET} now has
+ ${await tokenContract.balanceOf(RECEIVER_WALLET)} tokens`
+ );
+
+}
+```
+
+This script uses `ethers` to interact with the contract we’ve just deployed.
+
+::callout{icon="i-heroicons-light-bulb"}
+Existing libraries like `ethers` , `viem` and `web3.js` can be used to interact with smart contracts deployed on zkSync.
+::
+
+Fill in the following variables:
+
+- `TOKEN_CONTRACT_ADDRESS`: the contract address of the ERC20 token we just deployed.
+- `RECEIVER_WALLET`: address of a different account that will receive new tokens.
+- `TOKEN_AMOUNT`: the amount of tokens we’ll send to the account.
+
+With the `mint-token.ts` file open in the Atlas editor, click on the “Deploy” button to run the script and see the output
+in the terminal.
+
+![ERC20 interact script in Atlas](/images/101-erc20/atlas-erc20-interact.png)
+
+To confirm the account has received the tokens, visit the [%%zk_testnet_name%%
+explorer](%%zk_testnet_block_explorer_url%%) and search the receiver wallet
+address.
You’ll see the new token balance in the assets table: + +![ERC20 tokens in account balance](/images/101-erc20/erc20-tokens-minted.png) diff --git a/content/00.build/05.quick-start/_erc20_tutorial/_remix_erc20_tutorial.md b/content/00.build/05.quick-start/_erc20_tutorial/_remix_erc20_tutorial.md new file mode 100644 index 00000000..56bbf572 --- /dev/null +++ b/content/00.build/05.quick-start/_erc20_tutorial/_remix_erc20_tutorial.md @@ -0,0 +1,149 @@ +--- +title: ERC20 with Remix +--- +## Custom ERC20 token code + +ERC20 tokens are a standard for fungible tokens, which can be traded and represent a fixed value. You’ve used ERC20 +tokens if you’ve transacted with USDC, DAI, USDT, LINK or UNI. + +The ERC20 token we’re going to deploy will allow users to mint and burn tokens. The entire smart contract code is as +follows: + +```solidity +// SPDX-License-Identifier: Unlicensed +pragma solidity ^0.8.19; + +import "@openzeppelin/contracts/token/ERC20/ERC20.sol"; +import "@openzeppelin/contracts/access/Ownable.sol"; +import "@openzeppelin/contracts/token/ERC20/extensions/ERC20Burnable.sol"; + +contract TestToken is ERC20, Ownable, ERC20Burnable { + constructor(string memory name, string memory symbol) + ERC20(name, symbol) Ownable(msg.sender) { + _mint(msg.sender, 100 * 10 ** decimals()); + } + + function mint(address to, uint256 amount) public onlyOwner { + _mint(to, amount); + } +} +``` + +::callout{icon="i-heroicons-light-bulb"} +zkSync is [EVM compatible](/build/resources/glossary#evm-compatible), so you can use existing popular libraries like OpenZeppelin. +:: + +The most important features are: + +- `Ownable` : this extension sets the deployer account as owner of the smart contract. It also introduces the + `onlyOwner` modifier that restricts the execution of certain functions to the owner of the contract. +- `ERC20Burnable`: this extension adds the `burn` and `burnFrom` functions to the smart contract. These functions + destroy tokens from a given account. 
+- `constructor`: called on deployment, the constructor will assign the given name and symbol to the token and mint 100 + units of it to the account that deployed the contract. +- `mint` : this function creates new token units to a given account. It uses the `onlyOwner` modifier so it can only be + called from the owner account. + +## Deploy the smart contract + +The Remix IDE is an open-source web and desktop application that supports Ethereum smart contract development and +deployment, offering tools for writing, testing, debugging, and deploying smart contracts written in Solidity to EVM +compatible protocols. + +### Enable the Remix zkSync plugin + +:display-partial{path="/_partials/_enable-remix-zksync-plugin"} + +To open this project in Remix, use the “Clone” option from the file explorer to import it from the following GitHub +repository:`https://github.com/zkSync-Community-Hub/zksync-quickstart-remix` + +![Clone repo in Remix](/images/remix-plugin-clone-repo.gif) + +Once the project is imported, open the `contracts/TestToken.sol` file. To compile the contract, click on the zkSync +plugin on the left menu and then "Compile TestToken.sol". If you get a popup message requesting permissions to access +**`ACCESS TO "WRITEFILE" OF "FILE MANAGER"`,** click on Accept. + +::callout{icon="i-heroicons-light-bulb"} +Behind the scenes, Remix is using the zkSync Era custom Solidity compiler (named `zksolc` ) to generate ZKEVM compatible +bytecode. +[Learn more about zkSync custom compilers](/zk-stack/components/compiler/toolchain). +:: + +We will use our wallet’s configured network to connect and deploy our smart contract so make sure your wallet is +currently connected to the %%zk_testnet_name%%. In Remix, under the Environment Section, select “Wallet” and click on +“Connect Wallet”. + +To deploy the contract, click on “Deploy” and sign the transaction on your wallet. Congratulations, your ERC20 token +contract is now deployed on %%zk_testnet_name%%! 
+ +## Interact with the ERC20 contract + +In the `scripts` folder you can find the `mint-token.ts` script containing the following code: + +```typescript +import {ethers} from "ethers"; + +// Address of the ERC20 token contract +const TOKEN_CONTRACT_ADDRESS = "" +// Wallet that will receive tokens +const RECEIVER_WALLET = ""; +// Amount of tokens to mint in ETH format, e.g. 1.23 +const TOKEN_AMOUNT = "123.55"; + +(async () => { + try { + + // Note that the script needs the ABI which is generated from the compilation artifact. + // Make sure contract is compiled for zkSync and artifacts are generated + const artifactsPath = `browser/contracts/artifacts/TestToken.json` // Change this for different path + + const metadata = JSON.parse(await remix.call('fileManager', 'getFile', artifactsPath)) + + // 'web3Provider' is a remix global variable object + const signer = (new ethers.providers.Web3Provider(web3Provider)).getSigner(0) + + // initialise token contract with address, abi and signer + const tokenContract= new ethers.Contract(TOKEN_CONTRACT_ADDRESS, metadata.abi, signer); + + console.log("Minting tokens..."); + const tx = await tokenContract.mint( + RECEIVER_WALLET, + ethers.utils.parseEther(TOKEN_AMOUNT) + ); + console.log(`Mint transaction is ${tx.hash}`) + await tx.wait(); + console.log("Success!"); + + const balance = await tokenContract.balanceOf(RECEIVER_WALLET) + + console.log(`The account ${RECEIVER_WALLET} now has ${balance} tokens`) + + } catch (e) { + console.log(e.message) + } +})() + +``` + +This scripts uses `ethers` to interact with the contract we’ve just deployed. + +::callout{icon="i-heroicons-light-bulb"} +Existing libraries like `ethers` , `viem` and `web3.js` can be used to interact with smart contracts deployed on zkSync. +:: + +Fill the following variables: + +- `TOKEN_CONTRACT_ADDRESS`: the contract address of the ERC20 token we just deployed. +- `RECEIVER_WALLET`: address of a different account that will receive new tokens. 
+- `TOKEN_AMOUNT`: the amount of tokens we’ll send to the account. + +With the `mint-token.ts` file open in the editor, click on the “▶️” button to run the script and see the output in the +terminal. + +![ERC20 interact script in Remix](/images/101-erc20/remix-erc20-interact.png) + +To confirm the account has received the tokens, visit the [%%zk_testnet_name%% +explorer](%%zk_testnet_block_explorer_url%%) and search the receiver wallet +address. You’ll see the new token balance in the assets table: + +![ERC20 tokens in account balance](/images/101-erc20/erc20-tokens-minted.png) diff --git a/content/00.build/05.quick-start/_paymaster_intro/_atlas_paymaster_intro.md b/content/00.build/05.quick-start/_paymaster_intro/_atlas_paymaster_intro.md new file mode 100644 index 00000000..b9590111 --- /dev/null +++ b/content/00.build/05.quick-start/_paymaster_intro/_atlas_paymaster_intro.md @@ -0,0 +1,133 @@ +--- +title: Paymaster with Atlas +--- + +Click the following button to open the project in Atlas: + +:u-button{ icon="i-heroicons-code-bracket" size="lg" color="primary" variant="solid" :trailing="false" +to="https://app.atlaszk.com/projects?template=https://github.com/zkSync-Community-Hub/zksync-quickstart-atlas&open=/scripts/paymaster-transaction.ts&chainId=%%zk_testnet_chain_id%%" +target="_blank" label="Open script in Atlas"} + +It’ll open the script to send a transaction via the paymaster. Let’s go through the most important parts: + +### Retrieve the token balance + +```typescript +// retrieve and print the current balance of the wallet +let ethBalance = await provider.getBalance(walletAddress) +let tokenBalance = await tokenContract.balanceOf(walletAddress) +console.log(`Account ${walletAddress} has ${ethers.utils.formatEther(ethBalance)} ETH`); +console.log(`Account ${walletAddress} has ${ethers.utils.formatUnits(tokenBalance, 18)} tokens`); +``` + +In this part we’re retrieving the ETH and ERC20 token balances of the account. 
We’ll compare them after the transaction +is executed to see the difference. + +### Estimate transaction fee + +```typescript +// retrieve the testnet paymaster address +const testnetPaymasterAddress = await zkProvider.getTestnetPaymasterAddress(); + +console.log(`Testnet paymaster address is ${testnetPaymasterAddress}`); + +const gasPrice = await provider.getGasPrice(); + +// define paymaster parameters for gas estimation +const paramsForFeeEstimation = utils.getPaymasterParams(testnetPaymasterAddress, { + type: "ApprovalBased", + token: TOKEN_CONTRACT_ADDRESS, + // set minimalAllowance to 1 for estimation + minimalAllowance: ethers.BigNumber.from(1), + // empty bytes as testnet paymaster does not use innerInput + innerInput: new Uint8Array(0), +}); + +// estimate gasLimit via paymaster +const gasLimit = await messagesContract.estimateGas.sendMessage(NEW_MESSAGE, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paramsForFeeEstimation, + }, +}); + +// fee calculated in ETH will be the same in +// ERC20 token using the testnet paymaster +const fee = gasPrice * gasLimit; +``` + +1. Retrieve the testnet paymaster address. +2. Generate the paymaster parameters to estimate the transaction fees passing the paymaster address, token address, and + `ApprovalBased` as the paymaster flow type. +3. Retrieve the `gasLimit` of sending the transaction with the paymaster params. +4. Calculate the final estimated fee which is equal to `gasPrice` multiplied by `gasLimit`. 
+ +### Send the transaction + +```typescript +// new paymaster params with fee as minimalAllowance + const paymasterParams = utils.getPaymasterParams(testnetPaymasterAddress, { + type: "ApprovalBased", + token: TOKEN_CONTRACT_ADDRESS, + // provide estimated fee as allowance + minimalAllowance: fee, + // empty bytes as testnet paymaster does not use innerInput + innerInput: new Uint8Array(0), + }); + + // full overrides object including maxFeePerGas and maxPriorityFeePerGas + const txOverrides = { + maxFeePerGas: gasPrice, + maxPriorityFeePerGas: "1", + gasLimit, + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams, + } + } + + console.log(`Sign the transaction in your wallet`); + + // send transaction with additional paymaster params as overrides + const txHandle = await messagesContract.sendMessage(NEW_MESSAGE, txOverrides); +``` + +1. Create the new paymaster params with the calculated `fee` as `minimalAllowance` . +2. Complete the transaction overrides object with `maxFeePerGas`, `maxPriorityFeePerGas` and `gasPerPubdata` +3. Send the transaction including the `txOverrides` + +### Compare the final balance + +```typescript +ethBalance = await provider.getBalance(walletAddress) +tokenBalance = await tokenContract.balanceOf(walletAddress) +console.log(`Account ${walletAddress} now has ${ethers.utils.formatEther(ethBalance)} ETH`); +console.log(`Account ${walletAddress} now has ${ethers.utils.formatUnits(tokenBalance, 18)} tokens`); +``` + +Finally we retrieve and print the ETH and ERC20 balances to see how they’ve changed. 
+ +## Run the script + +To run the script, first enter the addresses of the `ZeekSecretMessages.sol` and `TestToken.sol` contracts that we +deployed previously ([Deploy your first contract](/build/quick-start/deploy-your-first-contract) and +[Erc20 Token](/build/quick-start/erc20-token)) in the following variables at the beginning of +the script: + +```typescript +// Address of the ZeekMessages contract +const ZEEK_MESSAGES_CONTRACT_ADDRESS = ""; +// Address of the ERC20 token contract +const TOKEN_CONTRACT_ADDRESS = "" +// Message to be sent to the contract +const NEW_MESSAGE = "This tx cost me no ETH!"; +``` + +Next, make sure the script file is selected in the Atlas editor and click on the “Deploy” button. + +![ERC20 interact script in Remix](/images/101-paymasters/atlas-paymaster-script.png) + +You’ll see the progress in the console. + +If everything worked as expected, only the ERC20 balance will decrease, meaning the fee was paid with the ERC20 token +instead of ETH. diff --git a/content/00.build/05.quick-start/_paymaster_intro/_remix_paymaster_intro.md b/content/00.build/05.quick-start/_paymaster_intro/_remix_paymaster_intro.md new file mode 100644 index 00000000..cec9f9c6 --- /dev/null +++ b/content/00.build/05.quick-start/_paymaster_intro/_remix_paymaster_intro.md @@ -0,0 +1,139 @@ +--- +title: Paymaster with Remix +--- + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Remix does not support `zksync-ethers` yet so you can not use it to run this script. Use Atlas instead. +:: + +To open the project in Remix, use the “Clone” option from the file explorer to import it from the following GitHub +repository:`https://github.com/zkSync-Community-Hub/zksync-quickstart-remix` + +![Clone repo in Remix](/images/remix-plugin-clone-repo.gif) + +Once the project is imported, open the `scripts/paymaster-transaction.ts` file, which contains the code to send a +transaction via the paymaster. 
Let’s go through the most important parts: + +### Retrieve the token balance + +```typescript +// retrieve and print the current balance of the wallet +let ethBalance = await provider.getBalance(walletAddress) +let tokenBalance = await tokenContract.balanceOf(walletAddress) +console.log(`Account ${walletAddress} has ${ethers.utils.formatEther(ethBalance)} ETH`); +console.log(`Account ${walletAddress} has ${ethers.utils.formatUnits(tokenBalance, 18)} tokens`); +``` + +In this part we’re retrieving the ETH and ERC20 token balances of the account. We’ll compare them after the transaction +is executed to see the difference. + +### Estimate transaction fee + +```typescript +// retrieve the testnet paymaster address +const testnetPaymasterAddress = await zkProvider.getTestnetPaymasterAddress(); + +console.log(`Testnet paymaster address is ${testnetPaymasterAddress}`); + +const gasPrice = await provider.getGasPrice(); + +// define paymaster parameters for gas estimation +const paramsForFeeEstimation = utils.getPaymasterParams(testnetPaymasterAddress, { + type: "ApprovalBased", + token: TOKEN_CONTRACT_ADDRESS, + // set minimalAllowance to 1 for estimation + minimalAllowance: ethers.BigNumber.from(1), + // empty bytes as testnet paymaster does not use innerInput + innerInput: new Uint8Array(0), +}); + +// estimate gasLimit via paymaster +const gasLimit = await messagesContract.estimateGas.sendMessage(NEW_MESSAGE, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paramsForFeeEstimation, + }, +}); + +// fee calculated in ETH will be the same in +// ERC20 token using the testnet paymaster +const fee = gasPrice * gasLimit; +``` + +In this part of the script we: + +1. Retrieve the testnet paymaster address. +2. Generate the paymaster parameters to estimate the transaction fees passing the paymaster address, token address, and + `ApprovalBased` as the paymaster flow type. +3. 
Retrieve the `gasLimit` of sending the transaction with the paymaster params. +4. Calculate the final estimated fee which is equal to `gasPrice` multiplied by `gasLimit`. + +### Send the transaction + +```typescript +// new paymaster params with fee as minimalAllowance +const paymasterParams = utils.getPaymasterParams(testnetPaymasterAddress, { + type: "ApprovalBased", + token: TOKEN_CONTRACT_ADDRESS, + // provide estimated fee as allowance + minimalAllowance: fee, + // empty bytes as testnet paymaster does not use innerInput + innerInput: new Uint8Array(0), +}); + +// full overrides object including maxFeePerGas and maxPriorityFeePerGas +const txOverrides = { + maxFeePerGas: gasPrice, + maxPriorityFeePerGas: "1", + gasLimit, + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams, + } +} + +console.log(`Sign the transaction in your wallet`); + +// send transaction with additional paymaster params as overrides +const txHandle = await messagesContract.sendMessage(NEW_MESSAGE, txOverrides); +``` + +1. Create the new paymaster params with the calculated `fee` as `minimalAllowance` . +2. Complete the transaction overrides object with `maxFeePerGas`, `maxPriorityFeePerGas` and `gasPerPubdata` +3. Send the transaction including the `txOverrides` + +### Compare the final balance + +```typescript +ethBalance = await provider.getBalance(walletAddress) +tokenBalance = await tokenContract.balanceOf(walletAddress) +console.log(`Account ${walletAddress} now has ${ethers.utils.formatEther(ethBalance)} ETH`); +console.log(`Account ${walletAddress} now has ${ethers.utils.formatUnits(tokenBalance, 18)} tokens`); +``` + +Finally we retrieve and print the ETH and ERC20 balances to see how they’ve changed. 
+ +## Run the script + +To run the script, first enter the addresses of the `ZeekSecretMessages.sol` and `TestToken.sol` contracts that we +deployed previously ([Deploy your first contract](/build/quick-start/deploy-your-first-contract) and +[Erc20 Token](/build/quick-start/erc20-token)) in the following variables at the beginning of +the script: + +```typescript +// Address of the ZeekMessages contract +const ZEEK_MESSAGES_CONTRACT_ADDRESS = ""; +// Address of the ERC20 token contract +const TOKEN_CONTRACT_ADDRESS = "" +// Message to be sent to the contract +const NEW_MESSAGE = "This tx cost me no ETH!"; +``` + +Next, make sure the script file is selected in the Remix editor and click on the “▶️” button. + + + +You’ll see the progress in the console. + +If everything worked as expected, only the ERC20 balance will decrease, meaning the fee was paid with the ERC20 token +instead of ETH. diff --git a/content/00.build/10.zksync-101/00.index.md b/content/00.build/10.zksync-101/00.index.md new file mode 100644 index 00000000..f2891f11 --- /dev/null +++ b/content/00.build/10.zksync-101/00.index.md @@ -0,0 +1,141 @@ +--- +title: Getting started +description: Learn to build and deploy smart contracts on zkSync Era. +--- + +Welcome to zkSync 101 for deploying smart contracts on zkSync! +In this guide, we'll walk you through the process of creating and deploying a simple smart contract that creates a crowdfunding campaign for Zeek. + +By the end of the series, you will learn integral pieces that will +help you become a great developer on zkSync! You will learn how to: + +:check-icon Initialize a new project with zksync-cli. + +:check-icon Deploy contracts on zkSync Era using Hardhat or Foundry. + +:check-icon Test your contracts with Hardhat or Foundry. + +:check-icon Implement upgradable patterns for your contracts. + +:check-icon Implement a Paymaster to simplify transactions for your users. 
+ +--- + +## Install zksync-cli + +Our Quickstart series utilizes the `zksync-cli` to help you develop and interact with zkSync from your local machine. Check out our +[zksync-cli section](/build/tooling/zksync-cli) to learn more on how to use the CLI. + +You will need to install a couple tools to effectively use `zksync-cli`: + +#### Install Node.js or Bun.sh + +You will need either Node.js or Bun.sh. +The choice depends on your project's requirements and personal preference for package management and execution speed. +If you are unfamiliar with both, choose Node.js. + +- Node.js + - Download the Long-Term Support (LTS) version from the [official Node.js website](https://nodejs.org/en/download). + - For first-time users, the [Node.js usage guide](https://nodejs.org/api/synopsis.html#usage) + offers comprehensive instructions on getting started. +- Bun.sh + - Obtain the latest version from the [Bun installation page](https://bun.sh/docs/installation). + Bun.sh is known for its high performance and modern JavaScript features. + +### Setup era local node (optional) + +Our Quickstart series will have you compile and deploy contracts to +%%zk_testnet_name%% which requires you to have ETH in your wallet for funding transactions. +Alternatively, our `zksync-cli` tool provides a way for you to setup a test node locally. +This era local node allows for quicker testing and debugging processes without incurring testnet transaction costs. + +#### Install Docker + +The era local node will need Docker to run locally on your machine. +Download the appropriate version from the [Docker website](https://docs.docker.com/engine/install/). + +#### Run a local zkSync Era node + +Run the following command in your terminal: + +```bash +zksync-cli dev start +``` + +Choose "In memory node" to deploy a local zkSync Era node in a Docker container. 
+ +The local era node will also include pre-configured rich wallets for use, visit [era-test-node rich wallets](/build/test-and-debug/in-memory-node#pre-configured-rich-wallets). + +Your local zkSync Era node is accessible at **[http://127.0.0.1:8011](http://127.0.0.1:8011/)**, ready for deployment or testing purposes. +Leave this terminal open and running as you build your projects. +When you are done running your local Era node, you can stop it with `Ctrl+C`. + +--- + +## Choose Hardhat or Foundry + +Our Quickstart series provides two options for your learning process using +either Hardhat or Foundry. Pick one to use and stick with as you go through +each of the guides. + + + +### Install foundry-zksync + +If you choose to use Foundry for the Quick Start series, you will need to +install the `foundry-zksync` tool. This tool is a specialized fork of Foundry, tailored for zkSync. +It extends Foundry's capabilities for Ethereum app development to support zkSync, +allowing for the compilation, deployment, testing, and interaction with smart contracts on zkSync. + +::callout{icon="i-heroicons-information-circle-16-solid" color="amber"} +`foundry-zksync` is still in an alpha stage, so some features might not be fully supported +yet and may not work as fully intended. It is open-sourced and contributions are welcome. +:: + +Quickly set up `foundry-zksync` by following these steps: + +1. **Clone the Repository**: + Download the latest version from GitHub: + + ```bash + git clone git@github.com:matter-labs/foundry-zksync.git + cd foundry-zksync + ``` + +2. 
**Run the Installer**: + Execute the script to install the `foundry-zksync` binaries `forge` and `cast`: + + ```bash + ./install-foundry-zksync + ``` + +### Private key setup with Foundry keystore + +:display-partial{ path="_partials/_foundry-create-keystore" } + +--- + +## Fund your wallet + +If you did not set up a local era node for development and plan to use %%zk_testnet_name%%, you will need testnet ETH to fund transactions. + +1. Obtaining Testnet ETH: + + - Use the [LearnWeb3 faucet](https://learnweb3.io/faucets/zksync_sepolia/) + to directly receive testnet ETH on %%zk_testnet_name%%. + - Alternatively, acquire SepoliaETH from [recommended faucets](/ecosystem/network-faucets) + transfer it to the %%zk_testnet_name%% via the [zkSync bridge](https://portal.zksync.io/bridge/?network=sepolia). + +2. Verify your balance: + + - Check your wallet's balance using the [%%zk_testnet_name%% explorer](%%zk_testnet_block_explorer_url%%). + +--- + +## Next Steps + +You should now have a fully working local environment to build new projects on zkSync! + +- Continue to [Hello zkSync!](/build/zksync-101/hello-zksync) to begin the series on building a crowdfunding campaign for Zeek. +- This setup provides you everything you need to build in zkSync. +You can skip on to creating your own projects using `zksync-cli` with your fully set up local dev environment. diff --git a/content/00.build/10.zksync-101/10.hello-zksync.md b/content/00.build/10.zksync-101/10.hello-zksync.md new file mode 100644 index 00000000..42605e99 --- /dev/null +++ b/content/00.build/10.zksync-101/10.hello-zksync.md @@ -0,0 +1,58 @@ +--- +title: Hello zkSync! +description: Learn to deploy smart contracts efficiently in the zkSync environment. +--- + +Welcome to the zkSync 101 guide for deploying smart contracts on zkSync! In this series, we'll walk you through the process +of creating and deploying a simple smart contract that creates a crowdfunding campaign for Zeek. 
In this section you will learn the following: + +:check-icon Initialize a new project with zksync-cli. + +:check-icon Craft a smart contract to fund Zeek's latest adventure. + +:check-icon Deploy the contract on the zkSync Era using your choice of Hardhat or Foundry. + +Let's dive in and start your developer journey on zkSync! + +--- + +## Prerequisites + +This series requires some initial setup of tools to elevate your +development experience building for zkSync. +Make sure to go through the setup provided in the initial [Getting started](/build/zksync-101) section. + +Select the framework you want to get started using zkSync Era with. + +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_hello-zksync/_hardhat_deploy_contract' +}, { + label: 'Foundry', + partial: '_hello-zksync/_foundry_deploy_contract' +}] +--- +:: + +## Takeaways + +- **EVM Compatibility:** zkSync is EVM compatible and you can write smart contracts in Solidity or Vyper. +- **Custom Compilation:** Contracts deployed to zkSync are compiled using `zksolc` or `zkvyper` as +they generate a special bytecode for zkSync's ZKEVM. +- **Development Tools:** zkSync supports your favorite development toolkit Hardhat and Foundry. + +## Next steps + +Having successfully deployed your first contract on zkSync, you're well on your way to becoming +a proficient zkSync developer. To expand your expertise: + +- **Explore Contract Factories:** Enhance your project by building a contract factory +for the `CrowdfundingCampaign` contract in the [next guide](/build/zksync-101/contract-factory). This will allow you to efficiently +manage multiple crowdfunding campaigns, each with its own unique parameters. +- **Dive Deeper into zkSync Features:** Investigate advanced zkSync features such as account abstraction, +and paymasters. +- **Join the Community:** Engage with the zkSync developer community through forums, +Discord channels, Dev Discussions, or GitHub repositories. 
Share your experiences, ask questions, +and collaborate on projects. diff --git a/content/00.build/10.zksync-101/20.contract-factory.md b/content/00.build/10.zksync-101/20.contract-factory.md new file mode 100644 index 00000000..ea9d0235 --- /dev/null +++ b/content/00.build/10.zksync-101/20.contract-factory.md @@ -0,0 +1,67 @@ +--- +title: Contract Factory +description: Learn how to deploy and manage multiple smart contracts on zkSync using a contract factory. +--- + +This second zkSync 101 installment advances from your introductory exploration of smart contract deployment to dive into the utility of contract factories. +Through this guide, you'll learn how to streamline the deployment of multiple crowdfunding campaigns using a single contract factory, leveraging the +foundational `CrowdfundingCampaign` contract in the first guide. + +:check-icon Advance your zkSync development journey with contract factories. + +:check-icon Construct a contract factory to create multiple crowdfunding campaigns. + +:check-icon Seamlessly deploy your contract factory on zkSync Era, using either Hardhat or Foundry. + +Let's explore the efficiency and scalability that contract factories bring. + +## What is a contract factory? + +A contract factory is a design pattern that allows for the creation of multiple +contract instances from a single "factory" contract. It's essentially a contract +that creates other contracts, streamlining and organizing the deployment of +numerous similar contracts efficiently. + +--- + +## Setup the project + +Select the framework you want to get started using zkSync Era with. 
+ +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_deploy_factory/_hardhat_deploy_contract_factory' +}, { + label: 'Foundry', + partial: '_deploy_factory/_foundry_deploy_contract_factory' +}] +--- +:: + +## Takeaways + +- **Contract Factories:** Utilizing contract factories significantly streamlines +the deployment process, allowing for the creation of multiple instances of a +contract, like the `CrowdfundingCampaign`, with varied parameters. +- **Scalability and Management:** Contract factories offer a scalable solution to manage +numerous contract instances, enhancing project organization and efficiency. +- **Event-Driven Insights:** The `CampaignCreated` event in the factory contract provides +a transparent mechanism to track each crowdfunding campaign's deployment, useful for +off-chain monitoring and interaction. + +## Next steps + +With the contract factory in your zkSync development arsenal, you're set to elevate +your smart contract projects. Here's how you can further your journey: + +- **Contract Testing:** Progress to the next guide focused on [testing your contracts](/build/zksync-101/testing). +Ensuring the reliability and security of your `CrowdfundingCampaign` through +comprehensive tests is critical. +- **Advanced zkSync Integrations:** Explore deeper into zkSync's ecosystem by +implementing features like account abstraction and paymasters to enhance user +experience and contract flexibility. +- **Community Engagement and Contribution:** Join the vibrant zkSync community. +Participate in forums, Discord, or GitHub discussions. Sharing insights, asking queries, +and contributing can enrich the ecosystem and your understanding of zkSync. 
diff --git a/content/00.build/10.zksync-101/30.testing.md b/content/00.build/10.zksync-101/30.testing.md new file mode 100644 index 00000000..9b4bed65 --- /dev/null +++ b/content/00.build/10.zksync-101/30.testing.md @@ -0,0 +1,58 @@ +--- +title: Testing +description: Discover how to effectively test smart contracts on zkSync Era ecosystem. +--- + +Welcome back to our zkSync 101 series, your fast-track to zkSync development! In this +third guide, we transition from deploying and managing contracts to the critical phase +of testing. This guide will walk you through the steps to ensure your `CrowdfundingCampaign` +contracts, introduced in our first guide and efficiently deployed through contract factories +in the second, work flawlessly. + +:check-icon Elevate your zkSync toolkit with robust contract testing techniques. + +:check-icon Craft comprehensive tests for your `CrowdfundingCampaign` to ensure reliability and security. + +:check-icon Use Hardhat or Foundry to write and run tests, ensuring your campaigns are ready. + +Dive into the world of smart contract testing and solidify the foundation of your zkSync projects. + +## Setup the project + +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_testing/_hardhat_contract_testing' +}, { + label: 'Foundry', + partial: '_testing/_foundry_contract_testing' +}] +--- +:: + +## Takeaways + +- **Testing**: Understanding contract testing is important for ensuring the reliability and security of your smart contracts +on zkSync. Proper testing safeguards against unforeseen errors and vulnerabilities. +- **Comprehensive Coverage**: Achieving comprehensive test coverage, including both positive and negative testing +scenarios, is essential for a robust smart contract. This guide emphasized the `contribute` method, +but testing should encompass all aspects of your contract. +- **Tooling Efficiency**: Leveraging Hardhat or Foundry for testing provides a streamlined and efficient workflow. 
+These tools offer powerful features and plugins, like `@nomicfoundation/hardhat-chai-matchers`, +that enhance the testing process. + +## Next Steps + +With a solid foundation in contract testing, you're well-equipped to advance your zkSync +development journey. Consider the following steps to further your expertise: + +- **Upgradeability**: Dive into the [Upgradeability article](/build/zksync-101/upgrading) focusing on contract upgradeability. +Learning to make your contracts upgradeable will enable you to update and improve your smart contracts +over time without losing state or funds. +- **Advanced zkSync Integrations:** Explore deeper into zkSync's ecosystem by +implementing features like account abstraction and paymasters to enhance user +experience and contract flexibility. +- **Community Engagement and Contribution:** Join the vibrant zkSync community. +Participate in forums, Discord, or GitHub discussions. Sharing insights, asking queries, +and contributing can enrich the ecosystem and your understanding of zkSync. diff --git a/content/00.build/10.zksync-101/40.upgrading.md b/content/00.build/10.zksync-101/40.upgrading.md new file mode 100644 index 00000000..ba85cb95 --- /dev/null +++ b/content/00.build/10.zksync-101/40.upgrading.md @@ -0,0 +1,57 @@ +--- +title: Upgradability +description: Learn to make smart contracts upgradeable within the zkSync ecosystem. +--- + +In this fourth installment for zkSync 101, we embark on a journey through contract upgradability, +an important aspect for maintaining and enhancing smart contracts over time. This guide will +lead you through the strategies and practices for making the `CrowdfundingCampaign` contract **upgradeable**. + +:check-icon Harness advanced techniques for contract upgradability in zkSync. + +:check-icon Implement upgradeable patterns for the `CrowdfundingCampaign` to ensure long-term adaptability and improvement. 
+ +:check-icon Leverage tools and best practices in zkSync to facilitate seamless contract upgrades. + +Begin to understand smart contract evolution and empower your zkSync applications with the +flexibility of upgradability. + +Select your preferred upgrade mechanism: + +::content-switcher +--- +items: [{ + label: 'Transparent', + partial: '_upgrading/_transparent_proxy_contract_upgradability' +}, { + label: 'Beacon', + partial: '_upgrading/_beacon_proxy_contract_upgradability' +}, + { + label: 'UUPS', + partial: '_upgrading/_uups_contract_upgradability' +},] +--- +:: + +## Takeaways + +- **Upgradability:** The guide highlights the critical aspect of smart contract upgradability, introducing techniques +for using transparent, beacon, and UUPS proxies. +This ensures your contracts remain adaptable, allowing for seamless updates to business logic or enhancements in efficiency. + +- **Flexibility:** Emphasizing flexibility, the guide demonstrates how upgradable +contracts maintain continuity of state and fund security, even as underlying +functionalities evolve. This approach provides a resilient framework for your dApps to grow and adapt over time. + +## Next Steps + +- **Exploring Paymasters:** Continue on to the next guide focused on [using paymasters](/build/zksync-101/paymaster) +with your smart contracts. Paymasters abstract gas payments in transactions, +offering new models for transaction fee management and enhancing user experience in dApps. +- **Advanced zkSync Integrations:** Explore deeper into zkSync's ecosystem by +implementing features like account abstraction and paymasters to enhance user +experience and contract flexibility. +- **Community Engagement and Contribution:** Join the vibrant zkSync community. +Participate in forums, Discord, or GitHub discussions. Sharing insights, asking queries, +and contributing can enrich the ecosystem and your understanding of zkSync. 
diff --git a/content/00.build/10.zksync-101/50.paymaster.md b/content/00.build/10.zksync-101/50.paymaster.md new file mode 100644 index 00000000..dc87b391 --- /dev/null +++ b/content/00.build/10.zksync-101/50.paymaster.md @@ -0,0 +1,96 @@ +--- +title: Paymaster +description: Implement a paymaster flow into your project. +--- + +Welcome to the final part of our zkSync 101 series on mastering zkSync development! +In this guide, we move beyond the basics +of smart contract deployment and the creation of contract factories to explore the innovative concept of paymasters +in the zkSync ecosystem. This guide will illuminate the power of paymasters to revolutionize transaction +fee management and enhance user experiences within your dApps. + +:check-icon Delve deeper into zkSync development with the introduction of paymasters. + +:check-icon Learn how paymasters can cover transaction fees for your dApp users, enhancing accessibility and user experience. + +:check-icon Discover the flexibility of fee payment with paymasters, including the ability to pay +fees in ERC20 tokens on zkSync Era, using Hardhat or Foundry. + +Embark on this journey to understand how paymasters can add a new layer of functionality and user-friendliness +to your decentralized applications. + +## What is a Paymaster? + +Paymasters in the zkSync ecosystem represent a groundbreaking approach to handling transaction fees. +They are special accounts designed to subsidize transaction costs for other accounts, +potentially making certain transactions free for end-users. +This feature is particularly useful for dApp developers looking +to improve their platform's accessibility and user experience by covering transaction fees on behalf of their users. + +## Built-in Paymaster Flows + +Paymasters can operate under various flows, some of which may require user interaction, such as setting allowances +for token swaps. 
These flows enable paymasters to support a wide range of use cases, from simple fee subsidies +to more complex scenarios involving ERC20 token exchanges for transaction fees. + +- **General Paymaster Flow:** This default flow requires no preliminary actions from users, allowing paymasters +to interpret transaction data as needed to cover fees. + +- **Approval-Based Paymaster Flow:** For operations requiring user permissions, such as token allowances, +this flow provides a structured approach. It ensures that user tokens can be seamlessly exchanged for transaction +fees, subject to user-approved limits. + +As we explore paymasters, remember that while they offer enhanced flexibility for fee management, their +implementation should always prioritize security and user trust. This guide aims to equip you with the knowledge +to effectively incorporate paymasters into your zkSync projects, paving the way for more user-friendly and accessible dApps. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The paymaster smart contract code is provided "as-is" without any express or implied warranties. +
+

- Users are solely responsible for ensuring that their design, implementation,
  and use of the paymaster smart contract software complies with all applicable laws,
  including but not limited to money transmission, anti-money laundering (AML), and payment processing regulations.

- The developers and publishers of this software disclaim any liability for any legal issues that may arise from its use.
::

---

## Paymaster flow

Select the paymaster type you want to get started using zkSync Era with.

::content-switcher
---
items: [{
  label: 'General',
  partial: '_paymasters/_general_paymaster_flow'
},{
  label: 'Approval',
  partial: '_paymasters/_approval_paymaster_flow'
}]
---
::

## Takeaways

- **Comprehensive Understanding of Paymaster Contracts:** This guide has provided a detailed look at both the
`ApprovalFlowPaymaster` and the `GeneralPaymaster` contracts, illustrating how they manage transaction fees
in zkSync. These paymasters are pivotal in handling gas payments, offering a more accessible transaction
experience for users.
- **Flexibility and User Empowerment:** By covering the transaction fees through ERC20 tokens or general subsidies, these
paymaster contracts offer significant flexibility and reduce the friction typically associated with on-chain
interactions. This feature enhances user engagement and adoption of dApps.

## Next Steps

- **Experiment with Different Paymaster Contracts:** Now that you are familiar with both approval-based and general
paymaster flows, you can experiment with these contracts by deploying them under various conditions
and with different types of validations, restrictions, and enhancements.
- **Develop a Front-End Interface:** Consider building a user interface that interacts with the paymaster contracts
you have deployed. This will not only improve the usability of your contracts but also provide practical insights
into how end-users interact with your dApps in real-world scenarios.
+- **Community Engagement and Contribution:** Join the vibrant zkSync community. +Participate in forums, Discord, or GitHub discussions. Sharing insights, asking queries, +and contributing can enrich the ecosystem and your understanding of zkSync. diff --git a/content/00.build/10.zksync-101/_deploy_factory/_foundry_deploy_contract_factory.md b/content/00.build/10.zksync-101/_deploy_factory/_foundry_deploy_contract_factory.md new file mode 100644 index 00000000..6d75699d --- /dev/null +++ b/content/00.build/10.zksync-101/_deploy_factory/_foundry_deploy_contract_factory.md @@ -0,0 +1,141 @@ +--- +title: Foundry | Deploy Contract Factory +--- + +:display-partial{path = "/_partials/_foundry_alpha_warning"} + +Run the following command in your terminal to initialize the Foundry project. + +```sh +npx zksync-cli@latest create --template qs-fs-factories foundry-contract-factory-quickstart +cd foundry-contract-factory-quickstart +``` + +## Set up your wallet + +:display-partial{path="build/zksync-101/_partials/_setup-wallet"} + +--- + +## Review the CrowdfundingFactory contract + +The `CrowdfundingFactory.sol` we will compile and deploy is provided under the [`/src` directory](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/foundry/factory/src/CrowdfundFactory.sol). + +The `CrowdfundingFactory.sol`contract will be used to deploy multiple instances of +the `CrowdfundingCampaign.sol` contract from the previous guide. 
+

::drop-panel
  ::panel{label="CrowdfundingFactory.sol"}
  ```solidity [CrowdfundingFactory.sol]
  // SPDX-License-Identifier: MIT
  pragma solidity ^0.8.0;

  // Crowdfunding campaign contract
  import "./CrowdfundingCampaign.sol";

  // Factory contract to create and manage crowdfunding campaigns
  contract CrowdfundingFactory {
      CrowdfundingCampaign[] public campaigns;

      event CampaignCreated(address campaignAddress, uint256 fundingGoal);

      function createCampaign(uint256 fundingGoal) public {
          CrowdfundingCampaign newCampaign = new CrowdfundingCampaign(fundingGoal);
          campaigns.push(newCampaign);

          emit CampaignCreated(address(newCampaign), fundingGoal);
      }

      function getCampaigns() public view returns (CrowdfundingCampaign[] memory) {
          return campaigns;
      }
  }
  ```
  ::
::

The `CrowdfundingFactory` contract automates the creation and oversight of
`CrowdfundingCampaign` contracts, each with its distinct funding goals. It features:

- **Campaign Creation**: Utilizes the `createCampaign` method to initiate a new
`CrowdfundingCampaign` contract. This function takes a `fundingGoal` as an argument,
deploys a new campaign contract with this goal, and tracks the created campaign in the
`campaigns` array.
- **Campaign Tracking**: The `getCampaigns` method offers a view into all the campaigns
created by the factory, allowing for easy access and management of multiple crowdfunding
initiatives.

This contract factory approach streamlines the deployment of crowdfunding campaigns,
making it efficient to launch and manage multiple campaigns.

### Compile contract

Smart contracts deployed to zkSync must be compiled using our custom compiler.
For this particular guide we are making use of `zksolc`.
+ +To compile the contracts in the project, run the following command: + +```bash +forge build --zksync --use 0.8.20 +``` + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +[⠒] Compiling... +[⠃] Compiling 3 files with 0.8.20 +[⠊] Solc 0.8.20 finished in 336.48ms +Compiler run successful! +Compiling contracts for zkSync Era with zksolc v1.4.0 +``` + +The compiled zkEVM artifacts will be located in the `/zkout` folder, and the solc +artifacts will be located in the `/out` folder. + +--- + +## Deploy a CrowdfundingCampaign with CrowdfundingFactory + +This section outlines the steps to deploy the `CrowdfundingCampaign` contract using +our new `CrowdfundingFactory`. + +1. Let's start by deploying the `CrowdfundingFactory` contract. Execute the following +command: + + ```bash + forge create src/CrowdfundFactory.sol:CrowdfundingFactory --factory-deps src/CrowdfundingCampaign.sol:CrowdfundingCampaign --account myKeystore --sender --rpc-url zkSyncSepoliaTestnet --chain %%zk_testnet_chain_id%% --zksync + # To deploy the contract on local in-memory node: + # forge script script/DeployFactory.s.sol:DeployFactoryAndCreateCampaign --account myKeystore --sender --rpc-url inMemoryNode --broadcast --zksync + ``` + + Upon a successfull deployment you'll receive details of the deploying address, the contract address, + and the transaction hash, like so: + + ```bash + Deployer: 0x89E0Ff69Cc520b55C9F7Bcd3EAC17e81d9bB8dc2 + Deployed to: 0x607545Fd35ef49d7445555ddFa22938fD4Efb219 + Transaction hash: 0x94e7a97bb64c2bacffbd2a47f3c10021a80156d11082c079046a426c99518d28 + ``` + +1. 
Using the `CrowdfundingFactory` contract address let's deploy our `CrowdfundingCampaign`: + + ```bash + cast send 0x607545Fd35ef49d7445555ddFa22938fD4Efb219 "createCampaign(uint256)" "1" --rpc-url zkSyncSepoliaTestnet --chain %%zk_testnet_chain_id%% --account myKeystore --sender + # To use the contract factory on local in-memory node: + # cast send 0x607545Fd35ef49d7445555ddFa22938fD4Efb219 "createCampaign(uint256)" "1" --rpc-url inMemoryNode --chain 260 --account myKeystore --sender + ``` + + Upon a successfull deployment you'll receive details of the transaction, including the + contract address of our crowdfunding campaign: + + ```bash + blockHash 0x7f8dfcd365b4ba5ac690e94aedb5fdb2bdb5ef12b2ff68672ab58c7a89738161 + blockNumber 1576375 + contractAddress 0x95f83473b88B5599cdB273F976fB3DC66DEA1c1D + ... + ... + ``` + +🌟 Brilliant! Your contract factory and its first crowdfunding campaign are now operational. diff --git a/content/00.build/10.zksync-101/_deploy_factory/_hardhat_deploy_contract_factory.md b/content/00.build/10.zksync-101/_deploy_factory/_hardhat_deploy_contract_factory.md new file mode 100644 index 00000000..f116e9b7 --- /dev/null +++ b/content/00.build/10.zksync-101/_deploy_factory/_hardhat_deploy_contract_factory.md @@ -0,0 +1,190 @@ +--- +title: Hardhat | Deploy Contract Factory +--- +Run the following command in your terminal to initialize the project. + +```sh +npx zksync-cli@latest create --template qs-factories contract-factory-quickstart +cd contract-factory-quickstart +``` + +## Set up your wallet + +:display-partial{path="build/zksync-101/_partials/_setup-wallet"} + +--- + +## Compile the contracts + +This section will focus on compiling and deploying the `CrowdfundingFactory.sol` +contract that is provided under the [`/contracts` directory](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/factory/contracts/CrowdfundFactory.sol). 
+

The `CrowdfundingFactory.sol` contract will be used to deploy multiple instances of
the `CrowdfundingCampaign.sol` contract from the previous guide.
This contract factory approach streamlines the deployment of crowdfunding campaigns,
making it efficient to launch and manage multiple campaigns.

::drop-panel
  ::panel{label="CrowdfundingFactory.sol"}
  ```solidity [CrowdfundingFactory.sol]
  // SPDX-License-Identifier: MIT
  pragma solidity ^0.8.0;

  // Crowdfunding campaign contract
  import "./CrowdfundingCampaign.sol";

  // Factory contract to create and manage crowdfunding campaigns
  contract CrowdfundingFactory {
      CrowdfundingCampaign[] public campaigns;

      event CampaignCreated(address campaignAddress, uint256 fundingGoal);

      function createCampaign(uint256 fundingGoal) public {
          CrowdfundingCampaign newCampaign = new CrowdfundingCampaign(fundingGoal);
          campaigns.push(newCampaign);

          emit CampaignCreated(address(newCampaign), fundingGoal);
      }

      function getCampaigns() public view returns (CrowdfundingCampaign[] memory) {
          return campaigns;
      }
  }
  ```
  ::
::

The `CrowdfundingFactory` contract automates the creation and oversight of
`CrowdfundingCampaign` contracts, each with their own distinct funding goals.
The factory contract features:

- **Campaign Creation**: Utilizes the `createCampaign` method to initiate a new
`CrowdfundingCampaign` contract. This function takes a `fundingGoal` as an argument,
deploys a new campaign contract with this goal, and tracks the created campaign in the
`campaigns` array.
- **Campaign Tracking**: The `getCampaigns` method offers a view into all the campaigns
created by the factory, allowing for easy access and management of multiple crowdfunding
initiatives.

:display-partial{path = "/_partials/_compile-solidity-contracts"}

Upon successful compilation, you'll receive output detailing the
`zksolc` and `solc` versions used during compiling and the number
of Solidity files compiled.
+ +```bash +Compiling contracts for zkSync Era with zksolc v1.4.1 and solc v0.8.17 +Compiling 15 Solidity files +Successfully compiled 15 Solidity files +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +### Deploy CrowdfundingCampaigns via the CrowdfundingFactory + +This section outlines the steps to deploy the `CrowdfundingCampaign` contract +using our new `CrowdfundingFactory`. + +The deployment script is located at `/deploy/deployUsingFactory.ts`. + +::drop-panel + ::panel{label="deployUsingFactory.ts"} + + ```typescript [deployUsingFactory.ts] + import { deployContract, getWallet } from "./utils"; + import { ethers } from "ethers"; + import { HardhatRuntimeEnvironment } from "hardhat/types"; + + export default async function (hre: HardhatRuntimeEnvironment) { + const contractArtifactName = "CrowdfundingFactory"; + const constructorArguments = []; + const crowdfundingFactory = await deployContract(contractArtifactName, constructorArguments); + + console.log(`🏭 CrowdfundingFactory address: ${crowdfundingFactory.target}`); + + const contractArtifact = await hre.artifacts.readArtifact("CrowdfundingFactory"); + const factoryContract = new ethers.Contract( + crowdfundingFactory.target, + contractArtifact.abi, + getWallet() + ); + + // Define funding goal for the campaign, e.g., 0.1 ether + const fundingGoalInWei = ethers.parseEther('0.1').toString(); + + // Use the factory to create a new CrowdfundingCampaign + const createTx = await factoryContract.createCampaign(fundingGoalInWei); + await createTx.wait(); + + // Retrieve the address of the newly created CrowdfundingCampaign + const campaigns = await factoryContract.getCampaigns(); + const newCampaignAddress = campaigns[campaigns.length - 1]; + + console.log(`🚀 New CrowdfundingCampaign deployed at: ${newCampaignAddress}`); + console.log('✅ Deployment and campaign creation complete!'); + } + ``` + + :: +:: + +- The `deployUsingFactory.ts` script deploys the `CrowdfundingFactory` through 
the `deployContract` method. +- An instance of the factory is assigned to `factoryContract`. + This gives us access to the factory's functionalities. +- The `createCampaign` method is called on this instance to create + and deploy a new crowdfunding campaign contract. + +Run the deployment command. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deployUsingFactory.ts +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deployUsingFactory.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deployUsingFactory.ts +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deployUsingFactory.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpx exec hardhat deploy-zksync --script deployUsingFactory.ts +# To deploy the contract on local in-memory node: +# pnpx exec hardhat deploy-zksync --script deployUsingFactory.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deployUsingFactory.ts +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deployUsingFactory.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract address, source, and encoded constructor arguments: + +```bash +Starting deployment process of "CrowdfundingFactory"... +Estimated deployment cost: 0.0002500236 ETH + +"CrowdfundingFactory" was successfully deployed: + - Contract address: 0xD084EF36f8F5353f70498cD84cb8D2B844C120a8 + - Contract source: contracts/CrowdfundFactory.sol:CrowdfundingFactory + - Encoded constructor arguments: 0x + +Requesting contract verification... 
+Your verification ID is: 10097 +Contract successfully verified on zkSync block explorer! +🏭 CrowdfundingFactory address: 0xD084EF36f8F5353f70498cD84cb8D2B844C120a8 +🚀 New CrowdfundingCampaign deployed at: 0x060B748eC3512795E94045c406CFd5877DD84e4D +✅ Deployment and campaign creation complete! +``` + +🌟 Brilliant! Your contract factory and its first crowdfunding campaign are now +operational. diff --git a/content/00.build/10.zksync-101/_dir.yml b/content/00.build/10.zksync-101/_dir.yml new file mode 100644 index 00000000..a409bec0 --- /dev/null +++ b/content/00.build/10.zksync-101/_dir.yml @@ -0,0 +1 @@ +title: zkSync 101 diff --git a/content/00.build/10.zksync-101/_hello-zksync/_foundry_deploy_contract.md b/content/00.build/10.zksync-101/_hello-zksync/_foundry_deploy_contract.md new file mode 100644 index 00000000..cca854a0 --- /dev/null +++ b/content/00.build/10.zksync-101/_hello-zksync/_foundry_deploy_contract.md @@ -0,0 +1,161 @@ +--- +title: Foundry | Deploy Contract +--- + +:display-partial{path = "/_partials/_foundry_alpha_warning"} + +Run the following command in your terminal to initialize the Foundry project. + +```sh +npx zksync-cli@latest create --template qs-fs-hello-zksync hello-zksync-foundry-quickstart +cd hello-zksync-foundry-quickstart +``` + +## Set up your wallet + +:display-partial{path = "/build/zksync-101/_partials/_setup-wallet"} + +## Compile your first contract + +This guide +introduces a crowdfunding campaign contract aimed at supporting Zeek's inventive ventures. + +Let's start by reviewing the starter contract in the [`src/` directory](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/foundry/hello-zksync/src/Crowdfund.sol). 
+ +::drop-panel + ::panel{label="CrowdfundingCampaign.sol"} + ```solidity [CrowdfundingCampaign.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + contract CrowdfundingCampaign { + address public owner; + uint256 public fundingGoal; + uint256 public totalFundsRaised; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + constructor(uint256 _fundingGoal) { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + require(msg.value > 0, "Contribution must be greater than 0"); + contributions[msg.sender] += msg.value; + totalFundsRaised += msg.value; + + emit ContributionReceived(msg.sender, msg.value); + + if (totalFundsRaised >= fundingGoal) { + emit GoalReached(totalFundsRaised); + } + } + + function withdrawFunds() public { + require(msg.sender == owner, "Only the owner can withdraw funds"); + require(totalFundsRaised >= fundingGoal, "Funding goal not reached"); + + uint256 amount = address(this).balance; + totalFundsRaised = 0; + + (bool success, ) = payable(owner).call{value: amount}(""); + require(success, "Transfer failed."); + } + + function getTotalFundsRaised() public view returns (uint256) { + return totalFundsRaised; + } + + function getFundingGoal() public view returns (uint256) { + return fundingGoal; + } + } + ``` + :: +:: + +The `CrowdfundingCampaign` contract is designed for project crowdfunding. +Owned and deployed with a set funding goal, it features: + +- A constructor to initialize the campaign's funding target. +- The `contribute` method to log funds, triggering `ContributionReceived` and `GoalReached` events. +- The `withdrawFunds` method, allowing the owner to collect accumulated funds post-goal achievement. + +Smart contracts deployed to zkSync must be compiled using our custom compiler. +For this particular guide we are making use of `zksolc`. 
+ +To compile the contracts in the project, run the following command: + +```bash +forge build --zksync --use=0.8.24 +``` + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +[⠒] Compiling... +[⠃] Compiling 2 files with 0.8.20 +[⠊] Solc 0.8.20 finished in 736.48ms +Compiler run successful! +Compiling contracts for zkSync Era with zksolc v1.4.0 +``` + +The compiled zkEVM artifacts will be located in the `/zkout` folder, and the solc artifacts will be +located in the `/out` folder. + +### Deploy the CrowdfundingCampaign contract + +The deployment script is located at [`/script/Deploy.s.sol`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/foundry/hello-zksync/script/Deploy.s.sol). + +```solidity [Deploy.s.sol] +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.13; + +import "forge-std/Script.sol"; +import "../src/Crowdfund.sol"; + +contract DeployCrowdfundContract is Script { + function run() external { + uint256 deployerPrivateKey = vm.envUint("WALLET_PRIVATE_KEY"); + vm.startBroadcast(deployerPrivateKey); + + uint256 fundingGoalInWei = 0.02 ether; + new CrowdfundingCampaign(fundingGoalInWei); + + vm.stopBroadcast(); + } +} +``` + +**Key Components:** + +- **Constructor Argument:** The `CrowdfundingCampaign` contract is initialized with +a single constructor argument, `fundingGoalInWei`. +- **Broadcast Method:** The deployment uses `vm.startBroadcast(deployerPrivateKey)` to begin +the transaction broadcast and `vm.stopBroadcast()` to end it, facilitating the actual deployment of the contract on the blockchain. + +Execute the deployment command. 
+ +```bash +forge script script/Deploy.s.sol:DeployCrowdfundContract --account myKeystore --sender --rpc-url zkSyncSepoliaTestnet --broadcast --zksync +# To deploy the contract on local in-memory node: +# forge script script/Deploy.s.sol:DeployCrowdfundContract --account myKeystore --sender --rpc-url inMemoryNode --broadcast --zksync +``` + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract address, transaction hash, and block number deployed to: + +```bash +... +✅ [Success]Hash: 0x69f5f1f0f5b3fa12ed2fbab4d6bb6edc02bbfff2f8c414d8171cc8295250296c +Contract Address: 0xB0C0d3d02c270b6ABe4862EA90bBa1Af192314a8 +Block: 1491370 +Paid: 0.0001168854 ETH (1168854 gas * 0.1 gwei) +``` + +🥳 Congratulations! Your smart contract is now deployed. 🚀 diff --git a/content/00.build/10.zksync-101/_hello-zksync/_hardhat_deploy_contract.md b/content/00.build/10.zksync-101/_hello-zksync/_hardhat_deploy_contract.md new file mode 100644 index 00000000..1695d051 --- /dev/null +++ b/content/00.build/10.zksync-101/_hello-zksync/_hardhat_deploy_contract.md @@ -0,0 +1,173 @@ +--- +title: Hardhat | Deploy Contract +--- + +Run the following command in your terminal to initialize the project. + +```sh +npx zksync-cli@latest create --template qs-hello-zksync hello-zksync-quickstart +cd hello-zksync-quickstart +``` + +## Set up your wallet + +:display-partial{path="build/zksync-101/_partials/_setup-wallet"} + +## Compile the CrowdfundingCampaign.sol contract + +This guide introduces a crowdfunding campaign contract aimed at supporting Zeek's inventive ventures. +Let's start by reviewing the starter contract in the [`contracts/` directory](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/hello-zksync/contracts/Crowdfund.sol). 
+ +::drop-panel + ::panel{label="CrowdfundingCampaign.sol"} + ```solidity [CrowdfundingCampaign.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + contract CrowdfundingCampaign { + address public owner; + uint256 public fundingGoal; + uint256 public totalFundsRaised; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + constructor(uint256 _fundingGoal) { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + require(msg.value > 0, "Contribution must be greater than 0"); + contributions[msg.sender] += msg.value; + totalFundsRaised += msg.value; + + emit ContributionReceived(msg.sender, msg.value); + + if (totalFundsRaised >= fundingGoal) { + emit GoalReached(totalFundsRaised); + } + } + + function withdrawFunds() public { + require(msg.sender == owner, "Only the owner can withdraw funds"); + require(totalFundsRaised >= fundingGoal, "Funding goal not reached"); + + uint256 amount = address(this).balance; + totalFundsRaised = 0; + + (bool success, ) = payable(owner).call{value: amount}(""); + require(success, "Transfer failed."); + } + + function getTotalFundsRaised() public view returns (uint256) { + return totalFundsRaised; + } + + function getFundingGoal() public view returns (uint256) { + return fundingGoal; + } + } + ``` + :: +:: + +The `CrowdfundingCampaign` contract is designed for project crowdfunding. +This contract features: + +- A constructor to initialize the campaign's funding target. +- The `contribute` method to log funds, triggering `ContributionReceived` and `GoalReached` events. +- The `withdrawFunds` method, allowing the owner to collect accumulated funds post-goal achievement. 
+ +:display-partial{path = "/_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.1 and solc v0.8.17 +Compiling 15 Solidity files +Successfully compiled 15 Solidity files +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +## Deploy the contract + +The deployment script is located at [`/deploy/deploy.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/hello-zksync/deploy/deploy.ts). + +```typescript [deploy.ts] +import { deployContract } from "./utils"; + +// An example of a basic deploy script +// It will deploy a CrowdfundingCampaign contract to selected network +// `parseEther` converts ether to wei, and `.toString()` ensures serialization compatibility. +export default async function () { + const contractArtifactName = "CrowdfundingCampaign"; + const constructorArguments = [ethers.parseEther('.02').toString()]; + await deployContract(contractArtifactName, constructorArguments); +} +``` + +**Key Components:** + +- **contractArtifactName:** Identifies the `CrowdfundingCampaign` contract for deployment. +- **constructorArguments:** Sets initialization parameters for the contract. In this case, +the fundraising goal, converted from ether to `wei` to match Solidity's `uint256` type. + +1. Execute the deployment command corresponding to your package manager. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. 
+ +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deploy.ts +# The deploy keyword will trigger the `deploy` script command on `package.json` +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deploy.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deploy.ts +# The deploy keyword will trigger the `deploy` script command on `package.json` +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deploy.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script deploy.ts +# The deploy keyword will trigger the `deploy` script command on `package.json` +# To deploy the contract on local in-memory node: +# pnpm exec hardhat deploy-zksync --script deploy.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deploy.ts +# The deploy keyword will trigger the `deploy` script command on `package.json` +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deploy.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract address, source, and encoded constructor arguments: + +```bash +Starting deployment process of "CrowdfundingCampaign"... +Estimated deployment cost: 0.000501 ETH + +"CrowdfundingCampaign" was successfully deployed: + - Contract address: 0x4E3404F21b29d069539e15f8f9E712CeAE39d90C + - Contract source: contracts/Crowdfund.sol:CrowdfundingCampaign + - Encoded constructor arguments: 0x00000000000000000000000000000000000000000000000000470de4df820000 + +Requesting contract verification... +Your verification ID is: 10067 +Contract successfully verified on zkSync block explorer! +``` + +🥳 Congratulations! Your smart contract is now deployed. 
🚀 diff --git a/content/00.build/10.zksync-101/_partials/_setup-wallet.md b/content/00.build/10.zksync-101/_partials/_setup-wallet.md new file mode 100644 index 00000000..94f55e1b --- /dev/null +++ b/content/00.build/10.zksync-101/_partials/_setup-wallet.md @@ -0,0 +1,8 @@ +--- +title: Set up your wallet +--- + +Deploying contracts on the %%zk_testnet_name%% requires having testnet ETH. +If you're working within the local development environment, +you can utilize pre-configured rich wallets and skip this step. +For testnet deployments, you should have your wallet funded from the [previous step](/build/zksync-101#fund-your-wallet). diff --git a/content/00.build/10.zksync-101/_paymasters/_approval_paymaster_flow.md b/content/00.build/10.zksync-101/_paymasters/_approval_paymaster_flow.md new file mode 100644 index 00000000..ddfa92c8 --- /dev/null +++ b/content/00.build/10.zksync-101/_paymasters/_approval_paymaster_flow.md @@ -0,0 +1,406 @@ +--- +title: Approval Paymaster +description: Learn to deploy contract factories in the zkSync environment. +--- + +Run the following command in your terminal to initialize the project. 
+ +```sh +npx zksync-cli@latest create --template qs-paymaster contract-paymaster-quickstart +cd contract-paymaster-quickstart +``` + +## Set up your wallet + +:display-partial{path = "/build/zksync-101/_partials/_setup-wallet"} + +--- + +## Understanding the `ApprovalPaymaster` contract + +Let's start by reviewing the `ApprovalFlowPaymaster.sol` contract in the `contracts/` directory: + +::drop-panel + ::panel{label="ApprovalFlowPaymaster.sol"} + ```solidity [ApprovalFlowPaymaster.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + import {IPaymaster, ExecutionResult, PAYMASTER_VALIDATION_SUCCESS_MAGIC} from "@matterlabs/zksync-contracts/l2/system-contracts/interfaces/IPaymaster.sol"; + import {IPaymasterFlow} from "@matterlabs/zksync-contracts/l2/system-contracts/interfaces/IPaymasterFlow.sol"; + import {TransactionHelper, Transaction} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/TransactionHelper.sol"; + + import "@matterlabs/zksync-contracts/l2/system-contracts/Constants.sol"; + + import "@openzeppelin/contracts/token/ERC20/IERC20.sol"; + import "@openzeppelin/contracts/access/Ownable.sol"; + + /// @notice This smart contract pays the gas fees for accounts with balance of a specific ERC20 token. It makes use of the approval-based flow paymaster. 
+ contract ApprovalFlowPaymaster is IPaymaster, Ownable { + uint256 constant PRICE_FOR_PAYING_FEES = 1; + + address public allowedToken; + + modifier onlyBootloader() { + require( + msg.sender == BOOTLOADER_FORMAL_ADDRESS, + "Only bootloader can call this method" + ); + _; + } + + constructor() { + // Sepolia CROWN token address + allowedToken = 0x927488F48ffbc32112F1fF721759649A89721F8F; + } + + function validateAndPayForPaymasterTransaction( + bytes32, + bytes32, + Transaction calldata _transaction + ) + external + payable + onlyBootloader + returns (bytes4 magic, bytes memory context) + { + // Default to transaction acceptance + magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; + require( + _transaction.paymasterInput.length >= 4, + "The standard paymaster input must be at least 4 bytes long" + ); + + bytes4 paymasterInputSelector = bytes4( + _transaction.paymasterInput[0:4] + ); + // Check if it's approval-based flow + if (paymasterInputSelector == IPaymasterFlow.approvalBased.selector) { + (address token, uint256 amount, bytes memory data) = abi.decode( + _transaction.paymasterInput[4:], + (address, uint256, bytes) + ); + + // Ensure the token is the allowed one + require(token == allowedToken, "Invalid token"); + + // Check user's allowance + address userAddress = address(uint160(_transaction.from)); + address thisAddress = address(this); + uint256 providedAllowance = IERC20(token).allowance(userAddress, thisAddress); + require( + providedAllowance >= PRICE_FOR_PAYING_FEES, + "Min allowance too low" + ); + + uint256 requiredETH = _transaction.gasLimit * _transaction.maxFeePerGas; + try IERC20(token).transferFrom(userAddress, thisAddress, amount) {} + catch (bytes memory revertReason) { + if (revertReason.length <= 4) { + revert("Failed to transferFrom from user's account"); + } else { + assembly { + revert(add(0x20, revertReason), mload(revertReason)) + } + } + } + + (bool success, ) = payable(BOOTLOADER_FORMAL_ADDRESS).call{value: requiredETH}(""); + 
require(success, "Failed to transfer tx fee to bootloader."); + } else { + revert("Unsupported paymaster flow"); + } + } + + function postTransaction( + bytes calldata _context, + Transaction calldata _transaction, + bytes32, + bytes32, + ExecutionResult _txResult, + uint256 _maxRefundedGas + ) external payable override onlyBootloader {} + + function withdraw(address _to) external onlyOwner { + (bool success, ) = payable(_to).call{value: address(this).balance}(""); + require(success, "Failed to withdraw funds from paymaster."); + } + + receive() external payable {} + } + ``` + :: +:: + +**Key components:** + +- The `ApprovalFlowPaymaster` contract allows for transactions costs to be covered using an allowed ERC20 token at the +exchange of 1. +- **Allowed Token:** Transactions are facilitated using the `CROWN` token at address [0x927488F48ffbc32112F1fF721759649A89721F8F](https://sepolia.explorer.zksync.io/address/0x927488F48ffbc32112F1fF721759649A89721F8F#contract), +with a fee set at a constant value of 1. +- **`validateAndPayForPaymasterTransaction` Method:** This critical method evaluates transactions +to decide if the contract will cover the gas fees. It confirms the token used matches the allowed token +and checks if the token allowance is adequate. If conditions are met, it proceeds to transfer funds calculated +as `tx.gasprice * tx.gasLimit` to the `bootloader`. +- **`postTransaction`** Method: An optional method invoked +post-transaction execution, provided the transaction doesn't fail +due to out-of-gas errors. It receives several parameters, including the transaction's context and result, aiding in finalizing paymaster duties. +- **`onlyBootloader`** Modifier: Ensures that certain methods are exclusively callable by the system's bootloader, +adding an extra layer of security and control. 
+ +--- + +## Compile and deploy the `ApprovalFlowPaymaster` contract + +:display-partial{path = "/_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 1 Solidity file +Successfully compiled 1 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +The script to deploy the `ApprovalFlowPaymaster` contract is located at [`/deploy/deployApprovalFlowPaymaster.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/deploy/deployApprovalFlowPaymaster.ts). + +```typescript [deployApprovalFlowPaymaster.ts] +import { deployContract, getWallet, getProvider } from "./utils"; +import { ethers } from "ethers"; + +// An example of a basic deploy script +// It will deploy a CrowdfundingCampaign contract to selected network +// `parseEther` converts ether to wei, and `.toString()` ensures serialization compatibility. +export default async function() { + const contractArtifactName = "ApprovalFlowPaymaster"; + const constructorArguments = []; + const contract = await deployContract( + contractArtifactName, + constructorArguments + ); + const wallet = getWallet(); + const provider = getProvider(); + + // Supplying paymaster with ETH + // Paymaster will receive CROWN tokens from users and + // cover the gas fees for the transactions using ETH + await ( + await wallet.sendTransaction({ + to: contract.target, + value: ethers.parseEther("0.005"), + }) + ).wait(); + + let paymasterBalance = await provider.getBalance(contract.target.toString()); + console.log(`Paymaster ETH balance is now ${paymasterBalance.toString()}`); +} +``` + +**Key Components:** + +- **`deployContract` Method:** Utilized for deploying the `ApprovalFlowPaymaster` contract. 
This method takes the name of the +contract and any constructor arguments needed for initialization, +mirroring the deployment process used for the `CrowdfundingCampaign` contract. +- **Funding the Paymaster:** An important step where the deployed `ApprovalFlowPaymaster` contract is funded with ETH +to cover transaction fees for users. The script sends a transaction +from the deployer's wallet to the paymaster contract, ensuring it has sufficient balance to operate. + +Execute the deployment command corresponding to your package manager. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts +# To deploy the contract on local in-memory node: +# pnpm exec hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deployApprovalFlowPaymaster.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract address, source, and encoded constructor arguments: + +```bash +Starting deployment process of "ApprovalFlowPaymaster"... 
+Estimated deployment cost: 0.0006278488 ETH + +"ApprovalFlowPaymaster" was successfully deployed: + - Contract address: 0x4653CDB4D46c7CdFc5B1ff14ca1B15Db2B0b7819 + - Contract source: contracts/ApprovalFlowPaymaster.sol:ApprovalFlowPaymaster + - Encoded constructor arguments: 0x + +Requesting contract verification... +Your verification ID is: 10923 +Contract successfully verified on zkSync block explorer! +Paymaster ETH balance is now 5000000000000000 +``` + +--- + +## Interact with the `ApprovalFlowPaymaster` contract + +This section will navigate you through the steps to interact with the +[`ApprovalFlowPaymaster`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/contracts/ApprovalFlowPaymaster.sol) +contract, using it to cover transaction fees for your operation. + +### Obtain CROWN tokens + +The `ApprovalFlowPaymaster` requires CROWN tokens to cover transaction gas costs. To use this +paymaster, you will first need to acquire CROWN tokens by minting them yourself. Follow these +steps to mint CROWN tokens: + +1. Go to the [CROWN token contract on Sepolia](https://sepolia.explorer.zksync.io/address/0x927488F48ffbc32112F1fF721759649A89721F8F#contract). +2. Click on the **Contract** tab. +3. Navigate to the **Write** tab. +4. Locate the **mint** function, which is typically labeled as **6.mint**: + - In the `_to` field, enter your wallet address where you want the tokens to be deposited. + - In the `_amount` field, specify the number of tokens you wish to mint. +5. Click on the **mint** button to execute the transaction. + +Here is a visual guide to assist you: +![CROWN mint](/images/quickstart-paymasters/crown-mint.png) + +Ensure that your wallet is connected and configured for the Sepolia network before attempting to mint tokens. 
+ +### Interaction script + +The interaction script is situated in the `/deploy/interact/` directory, named [`interactWithApprovalFlowPaymaster.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/deploy/interact/interactWithApprovalFlowPaymaster.ts). + +Ensure the `CONTRACT_ADDRESS` and `PAYMASTER_ADDRESS` variables are set to your deployed contract and paymaster addresses, respectively. + +::drop-panel + ::panel{label="interactWithApprovalFlowPaymaster.ts"} + + ```typescript [interactWithApprovalFlowPaymaster.ts] + import * as hre from "hardhat"; + import { getWallet, getProvider } from "../utils"; + import { ethers } from "ethers"; + import { utils } from "zksync-ethers"; + + // Address of the contract to interact with + const CONTRACT_ADDRESS = "YOUR-CONTRACT-ADDRESS"; + const PAYMASTER_ADDRESS = "YOUR-PAYMASTER-ADDRESS"; + // Sepolia CROWN token address + const TOKEN_ADDRESS = "0x927488F48ffbc32112F1fF721759649A89721F8F" + + if (!CONTRACT_ADDRESS || !PAYMASTER_ADDRESS) + throw new Error("Contract and Paymaster addresses are required."); + + export default async function() { + console.log(`Running script to interact with contract ${CONTRACT_ADDRESS} using paymaster ${PAYMASTER_ADDRESS}`); + + // Load compiled contract info + const contractArtifact = await hre.artifacts.readArtifact( + "CrowdfundingCampaignV2" + ); + const provider = getProvider(); + // Initialize contract instance for interaction + const contract = new ethers.Contract( + CONTRACT_ADDRESS, + contractArtifact.abi, + getWallet() + ); + + const contributionAmount = ethers.parseEther("0.001"); + // Get paymaster params for the ApprovalBased paymaster + const paymasterParams = utils.getPaymasterParams(PAYMASTER_ADDRESS, { + type: "ApprovalBased", + token: TOKEN_ADDRESS, + minimalAllowance: 1n, + innerInput: new Uint8Array(), + }); + + const gasLimit = await contract.contribute.estimateGas({ + value: contributionAmount, + customData: { + 
gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParams, + }, + }); + + const transaction = await contract.contribute({ + value: contributionAmount, + maxPriorityFeePerGas: 0n, + maxFeePerGas: await provider.getGasPrice(), + gasLimit, + // Pass the paymaster params as custom data + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams, + }, + }); + console.log(`Transaction hash: ${transaction.hash}`); + + await transaction.wait(); + } + ``` + + :: +:: + +**Key Components:** + +- **Paymaster Parameters:** Before executing transactions that involve the contract, the script prepares paymaster parameters using +`getPaymasterParams`. This specifies the paymaster contract to be +used and the type of paymaster flow, which in this case is `Approval`, and includes the token address +of the ERC20 token, and the minimum allowance set to 1. + +- **Transaction with Paymaster:** Demonstrated by the `contribute` function call, the script shows how to include paymaster parameters +in transactions. This allows the paymaster to cover transaction +fees using the `CROWN` token, providing a seamless experience for users. 
+ +Execute the command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script interact/interactWithApprovalFlowPaymaster.ts +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script interact/interactWithApprovalFlowPaymaster.ts +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script interact/interactWithApprovalFlowPaymaster.ts +``` + +```bash [bun] +bun run hardhat deploy-zksync --script interact/interactWithApprovalFlowPaymaster.ts +``` + +:: + +Upon successful usage, you'll receive output detailing the transaction: + +```bash +Running script to interact with contract 0x68E8533acE01019CB8D07Eca822369D5De71b74D using paymaster 0x4653CDB4D46c7CdFc5B1ff14ca1B15Db2B0b7819 +Estimated gas limit: 459220 +Transaction hash: 0x6a5a5e8e7d7668a46260b6daf19c7a5579fa4a5ba4591977a944abb1a618187a +``` + +🎉 Great job! You've successfully interacted with the `CrowdfundingCampaignV2` using a paymaster to cover the transaction fees using the `CROWN` token. diff --git a/content/00.build/10.zksync-101/_paymasters/_general_paymaster_flow.md b/content/00.build/10.zksync-101/_paymasters/_general_paymaster_flow.md new file mode 100644 index 00000000..aedb07c6 --- /dev/null +++ b/content/00.build/10.zksync-101/_paymasters/_general_paymaster_flow.md @@ -0,0 +1,358 @@ +--- +title: General Paymaster +description: Learn to deploy contract factories in the zkSync environment. +--- + +Run the following command in your terminal to initialize the project. 
+ +```sh +npx zksync-cli@latest create --template qs-paymaster contract-paymaster-quickstart +cd contract-paymaster-quickstart +``` + +## Set up your wallet + +:display-partial{path = "/build/zksync-101/_partials/_setup-wallet"} + +--- + +## Understanding the `GeneralPaymaster` contract + +Let's start by reviewing the [`GeneralPaymaster.sol`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/contracts/GaslessPaymaster.sol) +contract in the `contracts/` directory: + +::drop-panel + ::panel{label="GeneralPaymaster.sol"} + ```solidity [GeneralPaymaster.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + import {IPaymaster, ExecutionResult, PAYMASTER_VALIDATION_SUCCESS_MAGIC} from "@matterlabs/zksync-contracts/l2/system-contracts/interfaces/IPaymaster.sol"; + import {IPaymasterFlow} from "@matterlabs/zksync-contracts/l2/system-contracts/interfaces/IPaymasterFlow.sol"; + import {TransactionHelper, Transaction} from "@matterlabs/zksync-contracts/l2/system-contracts/libraries/TransactionHelper.sol"; + import "@matterlabs/zksync-contracts/l2/system-contracts/Constants.sol"; + import "@openzeppelin/contracts/access/Ownable.sol"; + + /// @notice This contract does not include any validations other than using the paymaster general flow. + contract GaslessPaymaster is IPaymaster, Ownable { + modifier onlyBootloader() { + require( + msg.sender == BOOTLOADER_FORMAL_ADDRESS, + "Only bootloader can call this method" + ); + // Continue execution if called from the bootloader. + _; + } + + function validateAndPayForPaymasterTransaction( + bytes32, + bytes32, + Transaction calldata _transaction + ) + external + payable + onlyBootloader + returns (bytes4 magic, bytes memory context) + { + // By default we consider the transaction as accepted. 
+ magic = PAYMASTER_VALIDATION_SUCCESS_MAGIC; + require( + _transaction.paymasterInput.length >= 4, + "The standard paymaster input must be at least 4 bytes long" + ); + + bytes4 paymasterInputSelector = bytes4( + _transaction.paymasterInput[0:4] + ); + if (paymasterInputSelector == IPaymasterFlow.general.selector) { + // Note, that while the minimal amount of ETH needed is tx.gasPrice * tx.gasLimit, + // neither paymaster nor account are allowed to access this context variable. + uint256 requiredETH = _transaction.gasLimit * + _transaction.maxFeePerGas; + + // The bootloader never returns any data, so it can safely be ignored here. + (bool success, ) = payable(BOOTLOADER_FORMAL_ADDRESS).call{ + value: requiredETH + }(""); + require( + success, + "Failed to transfer tx fee to the Bootloader. Paymaster balance might not be enough." + ); + } else { + revert("Unsupported paymaster flow in paymasterParams."); + } + } + + function postTransaction( + bytes calldata _context, + Transaction calldata _transaction, + bytes32, + bytes32, + ExecutionResult _txResult, + uint256 _maxRefundedGas + ) external payable override onlyBootloader { + // Refunds are not supported yet. + } + + function withdraw(address payable _to) external onlyOwner { + // send paymaster funds to the owner + uint256 balance = address(this).balance; + (bool success, ) = _to.call{value: balance}(""); + require(success, "Failed to withdraw funds from paymaster."); + } + + receive() external payable {} + } + ``` + :: +:: + +**Key components:** + +- The `GaslessPaymaster` contract ensures that transaction fees are handled automatically without user intervention. +- **`validateAndPayForPaymasterTransaction` Method:** This mandatory method assesses whether the paymaster agrees to cover the +transaction fees. If affirmative, it transfers the necessary funds +(calculated as tx.gasprice * tx.gasLimit) to the operator. It returns a context for the `postTransaction` method. 
+- **`postTransaction`** Method: An optional method invoked +post-transaction execution, provided the transaction doesn't fail +due to out-of-gas errors. It receives several parameters, including the transaction's context and result, aiding in finalizing paymaster duties. +- **`onlyBootloader`** Modifier: Ensures that certain methods are +exclusively callable by the system's bootloader, adding an extra layer of security and control. + +--- + +## Compile and deploy the `GeneralPaymaster` contract + +:display-partial{path = "/_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 4 Solidity file +Successfully compiled 4 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +The script to deploy the `GaslessPaymaster` is located at [`/deploy/deployGaslessPaymaster.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/deploy/deployGaslessPaymaster.ts). + +```typescript [deployGaslessPaymaster.ts] +import { deployContract, getWallet, getProvider } from "./utils"; +import { ethers } from "ethers"; + +// An example of a basic deploy script +// It will deploy a CrowdfundingCampaign contract to selected network +// `parseEther` converts ether to wei, and `.toString()` ensures serialization compatibility. 
+export default async function() { + const contractArtifactName = "GaslessPaymaster"; + const constructorArguments = []; + const contract = await deployContract( + contractArtifactName, + constructorArguments + ); + const wallet = getWallet(); + const provider = getProvider(); + + // Supplying paymaster with ETH + await ( + await wallet.sendTransaction({ + to: contract.target, + value: ethers.parseEther("0.005"), + }) + ).wait(); + + let paymasterBalance = await provider.getBalance(contract.target.toString()); + console.log(`Paymaster ETH balance is now ${paymasterBalance.toString()}`); +} +``` + +**Key Components:** + +- **`deployContract` Method:** Utilized for deploying the `GaslessPaymaster` contract. This method takes the name of the +contract and any constructor arguments needed for initialization, +mirroring the deployment process used for the `CrowdfundingCampaign` contract. +- **Funding the Paymaster:** An important step where the deployed `GaslessPaymaster` contract is funded with ETH +to cover transaction fees for users. The script sends a transaction +from the deployer's wallet to the paymaster contract, ensuring it has sufficient balance to operate. + +Execute the deployment command corresponding to your package manager. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. 
+ +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deployGaslessPaymaster.ts +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deployGaslessPaymaster.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deployGaslessPaymaster.ts +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deployGaslessPaymaster.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script deployGaslessPaymaster.ts +# To deploy the contract on local in-memory node: +# pnpm exec hardhat deploy-zksync --script deployGaslessPaymaster.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deployGaslessPaymaster.ts +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deployGaslessPaymaster.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract address, source, and encoded constructor arguments: + +```bash +Starting deployment process of "GaslessPaymaster"... +Estimated deployment cost: 0.0004922112 ETH + +"GaslessPaymaster" was successfully deployed: + - Contract address: 0x6f72f0d7bDba2E2a923beC09fBEE64cD134680F2 + - Contract source: contracts/GaslessPaymaster.sol:GaslessPaymaster + - Encoded constructor arguments: 0x + +Requesting contract verification... +Your verification ID is: 10634 +Contract successfully verified on zkSync block explorer! +Paymaster ETH balance is now 5000000000000000 +``` + +--- + +## Interact with the GeneralPaymaster contract + +This section will navigate you through the steps to interact with the `GeneralPaymaster` contract, +using it to cover transaction fees for your operation. 
+ +The interaction script is situated in the `/deploy/interact/` directory, named [`interactWithGaslessPaymaster.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/paymaster/deploy/interact/interactWithGaslessPaymaster.ts). + +Ensure the `CONTRACT_ADDRESS` and `PAYMASTER_ADDRESS` variables are set to your deployed contract and paymaster addresses, respectively. + +::drop-panel + ::panel{label="interactWithGaslessPaymaster.ts"} + + ```typescript [interactWithGaslessPaymaster.ts] + import * as hre from "hardhat"; + import { getWallet, getProvider } from "../utils"; + import { ethers } from "ethers"; + import { utils } from "zksync-ethers"; + + // Address of the contract to interact with + const CONTRACT_ADDRESS = "YOUR-CONTRACT-ADDRESS"; + const PAYMASTER_ADDRESS = "YOUR-PAYMASTER-ADDRESS"; + if (!CONTRACT_ADDRESS || !PAYMASTER_ADDRESS) + throw new Error("Contract and Paymaster addresses are required."); + + export default async function() { + console.log(`Running script to interact with contract ${CONTRACT_ADDRESS} using paymaster ${PAYMASTER_ADDRESS}`); + + // Load compiled contract info + const contractArtifact = await hre.artifacts.readArtifact( + "CrowdfundingCampaignV2" + ); + + // Initialize contract instance for interaction + const contract = new ethers.Contract( + CONTRACT_ADDRESS, + contractArtifact.abi, + getWallet() + ); + + const provider = getProvider(); + let balanceBeforeTransaction = await provider.getBalance(getWallet().address); + console.log(`Wallet balance before contribution: ${ethers.formatEther(balanceBeforeTransaction)} ETH`); + + const contributionAmount = ethers.parseEther("0.01"); + // Get paymaster params + const paymasterParams = utils.getPaymasterParams(PAYMASTER_ADDRESS, { + type: "General", + innerInput: new Uint8Array(), + }); + + const gasLimit = await contract.contribute.estimateGas({ + value: contributionAmount, + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, 
+ paymasterParams: paymasterParams, + }, + }); + + const transaction = await contract.contribute({ + value: contributionAmount, + maxPriorityFeePerGas: 0n, + maxFeePerGas: await provider.getGasPrice(), + gasLimit, + // Pass the paymaster params as custom data + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams, + }, + }); + console.log(`Transaction hash: ${transaction.hash}`); + + await transaction.wait(); + + let balanceAfterTransaction = await provider.getBalance(getWallet().address); + // Check the wallet balance after the transaction + // We only pay the contribution amount, so the balance should be less than before + // Gas fees are covered by the paymaster + console.log(`Wallet balance after contribution: ${ethers.formatEther(balanceAfterTransaction)} ETH`); + } + ``` + + :: +:: + +**Key Components:** + +- **Paymaster Parameters:** Before executing transactions that involve the contract, the script prepares paymaster parameters using +`getPaymasterParams`. This specifies the paymaster contract to be +used and the type of paymaster flow, which in this case is `General`. + +- **Transaction with Paymaster:** Demonstrated by the `contribute` function call, the script shows how to include paymaster parameters +in transactions. This allows the paymaster to cover transaction +fees, providing a seamless experience for users. 
+ +Execute the command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script interact/interactWithGaslessPaymaster.ts +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script interact/interactWithGaslessPaymaster.ts +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script interact/interactWithGaslessPaymaster.ts +``` + +```bash [bun] +bun run hardhat deploy-zksync --script interact/interactWithGaslessPaymaster.ts +``` + +:: + +Upon successful usage, you'll receive output detailing the transaction: + +```bash +Running script to interact with contract 0x68E8533acE01019CB8D07Eca822369D5De71b74D using paymaster 0x6f72f0d7bDba2E2a923beC09fBEE64cD134680F2 +Wallet balance before contribution: 5.879909434005856127 ETH +Transaction hash: 0x41c463abf7905552b69b25e7918374aab27f2d7e8cbebe212a0eb6ef8deb81e8 +Wallet balance after contribution: 5.869909434005856127 ETH +``` + +🎉 Great job! You've successfully interacted with the `CrowdfundingCampaignV2` using a paymaster to cover the transaction fees. diff --git a/content/00.build/10.zksync-101/_testing/_foundry_contract_testing.md b/content/00.build/10.zksync-101/_testing/_foundry_contract_testing.md new file mode 100644 index 00000000..c791930c --- /dev/null +++ b/content/00.build/10.zksync-101/_testing/_foundry_contract_testing.md @@ -0,0 +1,209 @@ +--- +title: Foundry | Contract Testing +--- + +:display-partial{path = "/_partials/_foundry_alpha_warning"} + +Run the following command in your terminal to initialize the Foundry project. + +```sh +npx zksync-cli@latest create --template qs-fs-testing foundry-contract-testing-quickstart +cd foundry-contract-testing-quickstart +``` + +--- + +## Test the `CrowdfundingCampaign` contract + +Now that our setup is complete, it's time to focus on the core of this +guide - testing our `CrowdfundingCampaign.sol` contract. 
Here's a quick +refresher on its structure: + +::drop-panel + ::panel{label="CrowdfundingCampaign.sol"} + ```solidity [CrowdfundingCampaign.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + contract CrowdfundingCampaign { + address public owner; + uint256 public fundingGoal; + uint256 public totalFundsRaised; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + constructor(uint256 _fundingGoal) { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + require(msg.value > 0, "Contribution must be greater than 0"); + contributions[msg.sender] += msg.value; + totalFundsRaised += msg.value; + + emit ContributionReceived(msg.sender, msg.value); + + if (totalFundsRaised >= fundingGoal) { + emit GoalReached(totalFundsRaised); + } + } + + function withdrawFunds() public { + require(msg.sender == owner, "Only the owner can withdraw funds"); + require(totalFundsRaised >= fundingGoal, "Funding goal not reached"); + + uint256 amount = address(this).balance; + totalFundsRaised = 0; + + (bool success, ) = payable(owner).call{value: amount}(""); + require(success, "Transfer failed."); + } + + function getTotalFundsRaised() public view returns (uint256) { + return totalFundsRaised; + } + + function getFundingGoal() public view returns (uint256) { + return fundingGoal; + } + } + ``` + :: +:: + +Thorough testing involves scrutinizing every function and aspect of our contract, +including potential failure scenarios. In this guide, we'll focus in on the `contribute` +method to ensure it's tested. + +As a challenge to hone your testing skills further, +consider devising additional tests for the `withdrawFunds`, `getTotalFundsRaised`, +and `getFundingGoal` methods, expanding your test coverage and reinforcing the +reliability of the contract. 
+
+### Compile contract
+
+Smart contracts deployed to zkSync must be compiled using our custom compiler.
+For this particular guide we are making use of `zksolc`.
+
+To compile the contracts in the project, run the following command:
+
+```bash
+forge build --zksync --use 0.8.20
+```
+
+Upon successful compilation, you'll receive output detailing the
+`zksolc` and `solc` versions used during compiling and the number
+of Solidity files compiled.
+
+```bash
+[⠒] Compiling...
+[⠃] Compiling 22 files with 0.8.20
+[⠊] Solc 0.8.20 finished in 736.48ms
+Compiler run successful!
+Compiling contracts for zkSync Era with zksolc v1.4.0
+```
+
+The compiled zkEVM artifacts will be located in the `/zkout` folder, and the solc artifacts will be
+located in the `/out` folder.
+
+### Run the test command
+
+This section describes testing the `CrowdfundingCampaign.sol` contract. Let's
+start by reviewing the tests for the `CrowdfundingCampaign.sol` contract provided
+during the initialization step in the `/test` directory, specifically the
+[`CrowdfundingCampaign.t.sol` file](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/foundry/testing/test/CrowdfundingCampaign.t.sol).
+ +```solidity [CrowdfundingCampaign.sol] +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import "forge-std/Test.sol"; +import "forge-std/console.sol"; +import "../src/CrowdfundingCampaign.sol"; + +contract CrowdfundingCampaignTest is Test { + CrowdfundingCampaign campaign; + event GoalReached(uint256 totalFundsRaised); + address owner; + address addr1; + address addr2; + + function setUp() public { + owner = address(this); + + addr1 = vm.addr(1); + addr2 = vm.addr(2); + + campaign = new CrowdfundingCampaign(1 ether); + console.log("CrowdfundingCampaign deployed at: %s", address(campaign)); + } + + function test_RejectZeroContributions() public { + vm.expectRevert("Contribution must be greater than 0"); + campaign.contribute{value: 0}(); + } + + function test_AggregateContributions() public { + uint256 initialTotal = campaign.getTotalFundsRaised(); + + vm.prank(addr1); + vm.deal(addr1, 2 ether); + campaign.contribute{value: 0.5 ether}(); + + vm.prank(addr2); + vm.deal(addr2, 2 ether); + campaign.contribute{value: 0.3 ether}(); + + assertEq(campaign.getTotalFundsRaised(), initialTotal + 0.8 ether); + } + + function test_EmitGoalReachedWhenFundingGoalMet() public { + vm.prank(addr1); + vm.deal(addr1, 2 ether); + vm.expectEmit(true, true, false, true); + emit GoalReached(1 ether); + campaign.contribute{value: 1 ether}(); + } +} +``` + +- **Environment Setup**: Leverages Foundry's `Test` contract and setup functions +to prepare the test environment, ensuring a fresh state for each test case. +- **Deployment and Address Simulation**: Deploys the `CrowdfundingCampaign` contract +within the test setup and simulates addresses using Foundry's `vm.addr()` function for +various test actors. + +**`contribute` Method Tests:** + +- **Zero Contribution Validation**: Asserts that the contract rejects contribution +attempts with zero value, testing the contract's input validation logic. 
+- **Contribution Aggregation**: Confirms the contract's ability to correctly tally +contributions from various addresses, ensuring accurate tracking of the total funds raised. +- **Event Emission Upon Goal Achievement**: Utilizes Foundry's `vm.expectEmit` to +anticipate the `GoalReached` event when the funding goal is met, validating the +contract's event logic and state transitions. + +Execute the test command: + +```bash +forge test --zksync +``` + +Upon completion, the test suite will provide a summary of all executed tests, +indicating their success or failure: + +```bash +Ran 3 tests for test/CrowdfundingCampaign.t.sol:CrowdfundingCampaignTest +[PASS] test_AggregateContributions() (gas: 29204) +[PASS] test_EmitGoalReachedWhenFundingGoalMet() (gas: 18862) +[PASS] test_RejectZeroContributions() (gas: 8148) +Suite result: ok. 3 passed; 0 failed; 0 skipped; finished in 44.03ms (43.94ms CPU time) + +Ran 1 test suite in 48.11ms (44.03ms CPU time): 3 tests passed, 0 failed, 0 skipped (3 total tests) +``` + +🎉 Congratulations! The `contribute` method of the `CrowdfundingCampaign` contract +has been thoroughly tested and is ready for action. diff --git a/content/00.build/10.zksync-101/_testing/_hardhat_contract_testing.md b/content/00.build/10.zksync-101/_testing/_hardhat_contract_testing.md new file mode 100644 index 00000000..068209a5 --- /dev/null +++ b/content/00.build/10.zksync-101/_testing/_hardhat_contract_testing.md @@ -0,0 +1,238 @@ +--- +title: Hardhat | Contract Testing +--- + +Run the following command in your terminal to initialize the project. + +```sh +npx zksync-cli@latest create --template qs-testing contract-testing-quickstart +cd contract-testing-quickstart +``` + +--- + +## Local Era Node + +While setting up a local development environment was previously optional, testing contracts requires +a more structured setup. 
We'll use `hardhat-zksync` to run tests against an In-memory node, +which operates seamlessly within a separate process for an optimized testing workflow. + +If you have not set up your local era node yet, follow the instructions in the [Getting Started](/build/zksync-101#setup-era-local-node-optional) section. + +Within the `hardhat.config.ts`, you'll observe the `zksync` flag set to `true` under the +`hardhat` network, indicating the integration with zkSync's testing environment. + +```typescript [hardhat.config.ts] +hardhat: { + zksync: true, +}, +``` + +To use the In-memory node for testing, ensure the `hardhat` network is selected with +the `zksync` flag enabled. This setup initiates the node alongside your tests and ensures +it terminates once testing is complete. The node's port allocation starts at the default +`8011`, facilitating smooth and isolated test execution. + +Secondly within the `hardhat.config.ts`, you'll observe the importing of +`@nomicfoundation/hardhat-chai-matchers`. This plugin provides Hardhat with an extended +suite of assertion methods tailored for contract testing, significantly improving the testing +toolkit available for your project. + +```typescript +import "@nomicfoundation/hardhat-chai-matchers"; +``` + +### Test Wallet Configuration + +For testing purposes, we use pre-configured, well-funded wallets. During this testing guide, we will use the following pre-configured wallet, +which eliminates the need for manual funding or setup: + +- **Account Address:** `0x36615Cf349d7F6344891B1e7CA7C72883F5dc049` +- **Private Key:** `0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110` + +This streamlined approach allows us to focus on writing and running effective tests. + +--- + +## Compile the `CrowdfundingCampaign` contract + +Now that our setup is complete, it's time to focus on the core of this +guide - testing our `CrowdfundingCampaign.sol` contract. 
Here's a quick +refresher on its structure: + +::drop-panel + ::panel{label="CrowdfundingCampaign.sol"} + ```solidity [CrowdfundingCampaign.sol] + // SPDX-License-Identifier: MIT + pragma solidity ^0.8.0; + + contract CrowdfundingCampaign { + address public owner; + uint256 public fundingGoal; + uint256 public totalFundsRaised; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + constructor(uint256 _fundingGoal) { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + require(msg.value > 0, "Contribution must be greater than 0"); + contributions[msg.sender] += msg.value; + totalFundsRaised += msg.value; + + emit ContributionReceived(msg.sender, msg.value); + + if (totalFundsRaised >= fundingGoal) { + emit GoalReached(totalFundsRaised); + } + } + + function withdrawFunds() public { + require(msg.sender == owner, "Only the owner can withdraw funds"); + require(totalFundsRaised >= fundingGoal, "Funding goal not reached"); + + uint256 amount = address(this).balance; + totalFundsRaised = 0; + + (bool success, ) = payable(owner).call{value: amount}(""); + require(success, "Transfer failed."); + } + + function getTotalFundsRaised() public view returns (uint256) { + return totalFundsRaised; + } + + function getFundingGoal() public view returns (uint256) { + return fundingGoal; + } + } + ``` + :: +:: + +Thorough testing involves scrutinizing every function and aspect of our contract, +including potential failure scenarios. In this guide, we'll focus in on the `contribute` +method to ensure it's tested. + +As a challenge to hone your testing skills further, +consider writing additional tests for the `withdrawFunds`, `getTotalFundsRaised`, +and `getFundingGoal` methods, expanding your test coverage and reinforcing the +reliability of the contract. 
+ +:display-partial{path = "/_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.1 and solc v0.8.17 +Compiling 15 Solidity files +Successfully compiled 15 Solidity files +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +--- + +## Test `CrowdfundingCampaign` + +This section describes testing the `CrowdfundingCampaign.sol` contract. Let's +start by reviewing the tests for `CrowdfundingCampaign.sol` contract provided +during the initialization step in the `/tests` directory, specifically the +[`crowdFunding.test.ts` file](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/testing/test/crowdFunding.test.ts). + +```typescript [crowdFunding.test.ts] +import "@nomicfoundation/hardhat-chai-matchers"; +import { expect } from "chai"; +import { ethers } from "ethers"; +import { getWallet, LOCAL_RICH_WALLETS, deployContract } from "../deploy/utils"; + +describe("CrowdfundingCampaign", function () { + let campaign; + let owner, addr1, addr2; + + beforeEach(async function () { + owner = getWallet(LOCAL_RICH_WALLETS[0].privateKey); + addr1 = getWallet(LOCAL_RICH_WALLETS[1].privateKey); + addr2 = getWallet(LOCAL_RICH_WALLETS[2].privateKey); + const fundingGoalInWei = ethers.parseEther('1').toString(); + campaign = await deployContract("CrowdfundingCampaign", [fundingGoalInWei], { wallet: owner, silent: true }); + }); + + describe("Contribute", function () { + it("should reject contributions of 0", async function () { + await expect(campaign.connect(addr1).contribute({ value: ethers.parseEther("0") })).to.be.revertedWith("Contribution must be greater than 0"); + }); + + it("should aggregate contributions in totalFundsRaised", async function () { + await campaign.connect(addr1).contribute({ 
value: ethers.parseEther("0.5") }); + await campaign.connect(addr2).contribute({ value: ethers.parseEther("0.3") }); + expect(await campaign.getTotalFundsRaised()).to.equal(ethers.parseEther("0.8")); + }); + + it("should emit GoalReached event when funding goal is met", async function () { + await expect(campaign.connect(addr1).contribute({ value: ethers.parseEther("1") })) + .to.emit(campaign, "GoalReached") + .withArgs(ethers.parseEther("1")); + }); + }); +}); +``` + +- **Initialization**: Each test case initializes with fresh contract instances and predefined +rich wallet accounts to simulate various contributors and the contract owner. +- **Deployment**: The `CrowdfundingCampaign` contract is deployed using the `deployContract` +utility, setting a specific funding goal for each test scenario. + +**`contribute` Method Tests:** + +- **Zero Contributions**: Verifies that the contract correctly rejects contribution attempts with +zero value, ensuring the integrity of the contribution process. +- **Funds Aggregation**: Tests the contract's ability to accurately aggregate contributions from +multiple addresses and update the `totalFundsRaised` accordingly. +- **Goal Achievement**: Checks for the `GoalReached` event emission upon meeting the funding goal, +confirming the contract's responsiveness to achieving its set target. 
+ +Execute the test command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat test --network hardhat +``` + +```bash [yarn] +yarn hardhat test --network hardhat +``` + +```bash [pnpm] +pnpm exec hardhat test --network hardhat +``` + +```bash [bun] +bun run hardhat test --network hardhat +``` + +:: + +Upon completion, the test suite will provide a summary of all executed tests, +indicating their success or failure: + +```bash + CrowdfundingCampaign + Contribute + ✔ should reject contributions of 0 (45ms) + ✔ should aggregate contributions in totalFundsRaised (213ms) + ✔ should emit GoalReached event when funding goal is met (113ms) + + 3 passing (1s) +``` + +🎉 Congratulations! The `contribute` method of the `CrowdfundingCampaign` contract +has been thoroughly tested and is ready for action. diff --git a/content/00.build/10.zksync-101/_upgrading/_beacon/_foundry_beacon_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_beacon/_foundry_beacon_contract_upgradability.md new file mode 100644 index 00000000..6876ce71 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_beacon/_foundry_beacon_contract_upgradability.md @@ -0,0 +1,5 @@ +--- +title: Foundry | Contract Upgrade +--- + +Coming soon! diff --git a/content/00.build/10.zksync-101/_upgrading/_beacon/_hardhat_beacon_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_beacon/_hardhat_beacon_contract_upgradability.md new file mode 100644 index 00000000..d5c9365f --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_beacon/_hardhat_beacon_contract_upgradability.md @@ -0,0 +1,421 @@ +--- +title: Hardhat | Contract Upgrading +--- + +Run the following command in your terminal to initialize the project. 
+
+```sh
+npx zksync-cli@latest create --template qs-upgrade contract-upgrade-quickstart
+cd contract-upgrade-quickstart
+```
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+If you encounter an error while installing project dependencies using NPM as your package manager, try running `npm install --force`.
+::
+
+## Set up your wallet
+
+:display-partial{path = "/build/zksync-101/_partials/_setup-wallet"}
+
+---
+
+## Adapt `CrowdfundingCampaign.sol` contract for upgradability
+
+To adapt our `CrowdfundingCampaign.sol` contract for upgradability, we are
+transitioning to a proxy pattern. This approach separates the
+contract's logic (which can be upgraded) from its persistent state
+(stored in the proxy).
+
+In the `contracts/` directory you'll observe the refactored [`CrowdfundingCampaign` contract](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaign.sol),
+which initializes state variables through an
+`initialize` function instead of the constructor, in line with the proxy pattern.
+ +**Updated Contract Structure:** + +```solidity [CrowdfundingCampaign.sol] +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/proxy/utils/Initializable.sol"; + +contract CrowdfundingCampaign is Initializable { + address public owner; + uint256 public fundingGoal; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + // Remove constructor in favour of initialize method + function initialize(uint256 _fundingGoal) public initializer { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + // contribution logic remains the same + } + + function withdrawFunds() public { + // withdrawFunds logic remains the same + } + + function getTotalFundsRaised() public view returns (uint256) { + // getTotalFundsRaised remains the same + } + + function getFundingGoal() public view returns (uint256) { + // getFundingGoal remains the same + } +} +``` + +**Key Modifications:** + +- **Initializable**: Inherits from OpenZeppelin's `Initializable` to ensure the `initialize` function +can only be called once, similar to a constructor. +- **Initialize Function**: Replaces the constructor for setting initial state, facilitating upgrades +through new logic contracts. +- **Proxy Pattern**: Utilizes a proxy contract to delegate calls to this logic contract, +allowing for future upgrades without losing the contract's state. + +This restructuring prepares the `CrowdfundingCampaign` contract for upgradeability. + +--- + +## Compile the updated `CrowdfundingCampaign` contract + +Now that the `CrowdfundingCampaign` contract is adapted for contract upgradability, let's proceed to deploy +the contract so we may upgrade it in later steps. Since we've made changes to our contract we will +need to re-compile. 
+ +To compile the contracts in the project, run the following command: + +::code-group + +```bash [npm] +npm run compile +``` + +```bash [yarn] +yarn compile +``` + +```bash [pnpm] +pnpm run compile +``` + +```bash [bun] +bun run compile +``` + +:: + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 29 Solidity file +Successfully compiled 29 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +## Deploy the beacon and contract + +You'll find the necessary deployment script at [`/deploy/deployBeaconProxy.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/deployBeaconProxy.ts). + +```typescript [deployBeaconProxy.ts] +import { getWallet } from "./utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { ethers } from "ethers"; +import { HardhatRuntimeEnvironment } from "hardhat/types"; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + const contractArtifact = await deployer.loadArtifact("CrowdfundingCampaign"); + const fundingGoalInWei = ethers.parseEther('0.1').toString(); + + const beacon = await hre.zkUpgrades.deployBeacon( + getWallet(), + contractArtifact + ); + await beacon.waitForDeployment(); + + const crowdfunding = await hre.zkUpgrades.deployBeaconProxy(deployer.zkWallet, + await beacon.getAddress(), contractArtifact, [fundingGoalInWei]); + + await crowdfunding.waitForDeployment(); +} +``` + +**Key Components:** + +- **`deployBeacon` Method:** Initiates the deployment of a beacon contract, +which acts as a central point for managing future upgrades of the `CrowdfundingCampaign` +contract. 
The beacon's address is a critical component as it links the deployed proxy
+to the actual contract logic.
+
+- **`deployBeaconProxy` Method:** This step involves deploying the beacon proxy,
+which serves as the user-facing contract instance. It references the beacon for its logic,
+allowing for seamless upgrades without altering the proxy's address.
+The `fundingGoalInWei` parameter, converted from ether to wei, is passed during
+this step to initialize the contract with a funding goal.
+
+Execute the deployment command corresponding to your package manager. The default command
+deploys to the configured network in your Hardhat setup. For local deployment, append
+`--network inMemoryNode` to deploy to the running local in-memory node.
+
+::code-group
+
+```bash [npm]
+npx hardhat deploy-zksync --script deployBeaconProxy.ts
+# To deploy the contract on local in-memory node:
+# npx hardhat deploy-zksync --script deployBeaconProxy.ts --network inMemoryNode
+```
+
+```bash [yarn]
+yarn hardhat deploy-zksync --script deployBeaconProxy.ts
+# To deploy the contract on local in-memory node:
+# yarn hardhat deploy-zksync --script deployBeaconProxy.ts --network inMemoryNode
+```
+
+```bash [pnpm]
+pnpm exec hardhat deploy-zksync --script deployBeaconProxy.ts
+# To deploy the contract on local in-memory node:
+# pnpm exec hardhat deploy-zksync --script deployBeaconProxy.ts --network inMemoryNode
+```
+
+```bash [bun]
+bun run hardhat deploy-zksync --script deployBeaconProxy.ts
+# To deploy the contract on local in-memory node:
+# bun run hardhat deploy-zksync --script deployBeaconProxy.ts --network inMemoryNode
+```
+
+::
+
+Upon successful deployment, you'll receive output detailing the deployment process,
+including the contract addresses of the implementation
+contract, the admin contract, and the beacon
+proxy contract.
+ +```bash +Beacon impl deployed at 0xE3F814fa915A75bA47230537726C99f6517Da58e +Beacon deployed at: 0x26410Bebf5Df7398DCBC5f00e9EBBa0Ddf471C72 +Beacon proxy deployed at: 0xD58FA9Fb362Abf69cFc68A3545fD227165DAc167 +``` + +--- + +## Compile the `CrowdfundingCampaignV2` Contract + +With our initial setup deployed, we're ready to upgrade our `CrowdfundingCampaign.sol` +contract by incorporating a deadline for contributions. This addition not only brings +a new layer of functionality but also introduces the concept of time-based conditions +through a [`modifier`](https://docs.soliditylang.org/en/latest/contracts.html#function-modifiers). + +**Current Contract Overview:** + +The existing version of our contract allows for open-ended contributions towards a +funding goal, without any time constraints. + +**Proposed Upgrade:** + +We're introducing a `deadline` variable, initialized at contract deployment, to establish a +clear timeframe for accepting contributions. The `withinDeadline` modifier will then enforce +this constraint, ensuring contributions are made within the allowed period. + +**Enhanced Contract:** + +The upgraded contract, [`CrowdfundingCampaignV2.sol`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaignV2.sol), +located in the `/contracts` directory, +incorporates these changes: + +- **Deadline Variable:** A new state variable deadline defines the campaign's end time, +enhancing the contract with time-based logic. + +- **Initialization Logic:** An additional initialization method, `initializeV2`, sets the deadline +based on a duration provided during the upgrade. This function ensures that the upgrade is +backward-compatible and maintains the contract's integrity. + +- **Contribution Logic with Deadline:** The `contribute` method now includes a `withinDeadline` modifier, +ensuring all contributions are made within the set timeframe. 
+ +- **Deadline Enforcement:** The `withinDeadline` modifier checks the current time against the deadline, +safeguarding the contract from late contributions. + +**Deadline Extension Capability:** + +To provide flexibility, a new function allows the owner to extend the deadline, +offering adaptability to changing campaign needs. + +```solidity [CrowdfundingCampaignV2.sol] +function extendDeadline(uint256 _newDuration) public { + require(msg.sender == owner, "Only the owner can extend the deadline"); + deadline = block.timestamp + _newDuration; +} +``` + +This upgrade not only introduces the element of time to the campaign but also +exemplifies the use of `modifiers` for enforcing contract conditions. + +:display-partial{path = "/_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 4 Solidity file +Successfully compiled 4 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +## Upgrade to `CrowdfundingCampaignV2` + +This section describes the upgrade process to `CrowdfundingCampaignV2.sol` contract. Let's +start by reviewing the [`upgradeBeaconCrowdfundingCampaign.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/upgrade-scripts/upgradeBeaconCrowdfundingCampaign.ts) +script in the `deploy/upgrade-scripts` directory: + +Make sure to replace `YOUR_BEACON_ADDRESS_HERE` with the address of your deployed beacon and +`YOUR_PROXY_ADDRESS_HERE` with the actual address of your +deployed Beacon Proxy from the previous deployment step. 
+ +```typescript [upgradeBeaconCrowdfundingCampaign.ts] +import { getWallet } from "../utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { HardhatRuntimeEnvironment } from "hardhat/types"; +import * as zk from 'zksync-ethers'; +import { Contract } from 'ethers'; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + // Placeholder for the deployed beacon address + const beaconAddress = 'YOUR_BEACON_ADDRESS_HERE'; + + const contractV2Artifact = await deployer.loadArtifact('CrowdfundingCampaignV2'); + + // Upgrade the proxy to V2 + await hre.zkUpgrades.upgradeBeacon(deployer.zkWallet, beaconAddress, contractV2Artifact); + + console.log('Successfully upgraded crowdfundingCampaign to crowdfundingCampaignV2'); + + const attachTo = new zk.ContractFactory( + crowdfundingCampaignV2.abi, + crowdfundingCampaignV2.bytecode, + deployer.zkWallet, + deployer.deploymentType, + ); + + // Placeholder for the deployed beacon proxy address + const proxyAddress = 'YOUR_PROXY_ADDRESS_HERE'; + + const upgradedContract = attachTo.attach(proxyAddress); + + upgradedContract.connect(deployer.zkWallet); + // wait some time before the next call + await new Promise((resolve) => setTimeout(resolve, 2000)); + + // Initialize V2 with a new campaign duration + const durationInSeconds = 30 * 24 * 60 * 60; // For example, setting a 30-day duration + const initTx = await upgradedContract.initializeV2(durationInSeconds); + const receipt = await initTx.wait(); + + console.log(`CrowdfundingCampaignV2 initialized. Transaction Hash: ${receipt.hash}`); +} +``` + +**Key Components:** + +- **`upgradeBeacon`**: This method from the `hre.zkUpgrades` module is used to update the beacon contract +with the new version of the contract logic, `CrowdfundingCampaignV2`. +It ensures that all proxies pointing to this beacon will now execute the updated contract code. 
+- **`initializeV2`:** This method is specifically called post-upgrade to initialize or reconfigure any new state +variables or logic introduced in the `CrowdfundingCampaignV2`. +Here, it's used to set a new campaign duration, seamlessly +integrating new functionalities while retaining the existing contract state and funds. + +Execute the test command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script upgrade-scripts/upgradeBeaconCrowdfundingCampaign.ts +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script upgrade-scripts/upgradeBeaconCrowdfundingCampaign.ts +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script upgrade-scripts/upgradeBeaconCrowdfundingCampaign.ts +``` + +```bash [bun] +bun run hardhat deploy-zksync --script upgrade-scripts/upgradeBeaconCrowdfundingCampaign.ts +``` + +:: + +Upon successful deployment, you'll receive output detailing the upgrade process, +including the new beacon address, and transaction hash: + +```bash +New beacon impl deployed at 0x58BD5adb462CF087E5838d53aE38A3Fe0EAf7A31 +Successfully upgraded crowdfundingCampaign to crowdfundingCampaignV2 0x26410Bebf5Df7398DCBC5f00e9EBBa0Ddf471C72 +CrowdfundingCampaignV2 initialized! 0x5f3131c77fcac19390f5f644a3ad1f0e7719dee4b4b5b4746c992de00db743f7 +Fundraising goal: 100000000000000000 +``` + +--- + +## Verify upgradable contracts + +For the verification of our upgradable contracts, it's essential to utilize the proxy address that was specified in our +upgrade script. 
+ +To proceed with verification, execute the following command: + +::code-group + +```bash [npm] +npx hardhat verify +``` + +```bash [yarn] +yarn hardhat verify +``` + +```bash [pnpm] +pnpm exec hardhat verify +``` + +```bash [bun] +bun run hardhat verify +``` + +:: + +Upon successful verification, you'll receive output detailing the verification process: + +```bash +Verifying implementation: 0x58BD5adb462CF087E5838d53aE38A3Fe0EAf7A31 +Your verification ID is: 10547 +Contract successfully verified on zkSync block explorer! +Verifying beacon: 0x26410Bebf5Df7398DCBC5f00e9EBBa0Ddf471C72 +Your verification ID is: 10548 +Contract successfully verified on zkSync block explorer! +Verifying beacon proxy: 0xD58FA9Fb362Abf69cFc68A3545fD227165DAc167 +Your verification ID is: 10549 +Contract successfully verified on zkSync block explorer! +``` + +🎉 Congratulations! The `CrowdfundingCampaignV2` contract has been upgraded and verified! diff --git a/content/00.build/10.zksync-101/_upgrading/_beacon_proxy_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_beacon_proxy_contract_upgradability.md new file mode 100644 index 00000000..0c10b584 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_beacon_proxy_contract_upgradability.md @@ -0,0 +1,34 @@ +--- +title: Beacon Proxy Contract Upgradeability +--- + +### What is a beacon proxy upgradeable contract? +Beacon Proxy Upgradeable Contracts leverage a beacon to manage upgrades, allowing +for centralized logic updates across multiple proxies. The structure includes: + +1. **Beacon Contract**: Acts as the central point holding the address of the current logic contract. +It enables updating the logic for all associated proxies through a single transaction. +1. **Proxy Contracts**: These lightweight contracts delegate calls to the logic contract address +provided by the beacon, maintaining their own state and balance. +1. 
**Logic Contract**: Contains the executable business logic, which can be updated by changing +the beacon's reference without altering individual proxies. +1. **Admin Address**: Authorized to update the logic contract address in the beacon, ensuring controlled and secure upgrades. + +This arrangement allows multiple proxy contracts to be upgraded simultaneously by updating +the logic contract address in the beacon, streamlining the upgrade process. It preserves +the state and balance of each proxy contract, offering an efficient way to roll out new +features or fixes while maintaining a uniform interface for users. + +--- + +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_upgrading/_beacon/_hardhat_beacon_contract_upgradability' +}, { + label: 'Foundry', + partial: '_upgrading/_beacon/_foundry_beacon_contract_upgradability' +}] +--- +:: diff --git a/content/00.build/10.zksync-101/_upgrading/_transparent/_foundry_transparent_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_transparent/_foundry_transparent_contract_upgradability.md new file mode 100644 index 00000000..6876ce71 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_transparent/_foundry_transparent_contract_upgradability.md @@ -0,0 +1,5 @@ +--- +title: Foundry | Contract Upgrade +--- + +Coming soon! diff --git a/content/00.build/10.zksync-101/_upgrading/_transparent/_hardhat_transparent_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_transparent/_hardhat_transparent_contract_upgradability.md new file mode 100644 index 00000000..1410701d --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_transparent/_hardhat_transparent_contract_upgradability.md @@ -0,0 +1,403 @@ +--- +title: Hardhat | Contract Upgrading +--- +Run the following command in your terminal to initialize the project. 
+ +```sh +npx zksync-cli@latest create --template qs-upgrade contract-upgrade-quickstart +cd contract-upgrade-quickstart +``` + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +If you encounter an error while installing project dependencies using NPM as your package manager, try running `npm install --force`. +:: + +## Set up your wallet + +:display-partial{path = "build/zksync-101/_partials/_setup-wallet"} + +--- + +## Adapt `CrowdfundingCampaign.sol` contract for upgradability + +To adapt our `CrowdfundingCampaign.sol` contract for upgradability, we're +transitioning to a proxy pattern. This approach separates the +contract's logic (which can be upgraded) from its persistent state +(stored in the proxy). + +### Refactoring for Proxy Compatibility + +In the `contracts/` directory you'll observe the refactored the [`CrowdfundingCampaign` contract](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaign.sol) +which initializes state variables through an +`initialize` function instead of the constructor, in line with the +Transparent Proxy pattern. 
+ +**Updated Contract Structure:** + +```solidity [CrowdfundingCampaign.sol] +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import "@openzeppelin/contracts/proxy/utils/Initializable.sol"; + +contract CrowdfundingCampaign is Initializable { + address public owner; + uint256 public fundingGoal; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + // Remove constructor in favour of initialize method + function initialize(uint256 _fundingGoal) public initializer { + owner = msg.sender; + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + // contribution logic remains the same + } + + function withdrawFunds() public { + // withdrawFunds logic remains the same + } + + function getTotalFundsRaised() public view returns (uint256) { + // getTotalFundsRaised remains the same + } + + function getFundingGoal() public view returns (uint256) { + // getFundingGoal remains the same + } +} +``` + +**Key Modifications:** + +- **Initializable**: Inherits from OpenZeppelin's `Initializable` to ensure the `initialize` function +can only be called once, similar to a constructor. +- **Initialize Function**: Replaces the constructor for setting initial state, facilitating upgrades +through new logic contracts. +- **Proxy Pattern**: Utilizes a proxy contract to delegate calls to this logic contract, +allowing for future upgrades without losing the contract's state. + +This restructuring prepares the `CrowdfundingCampaign` contract for upgradability. + +--- + +## Deploy the `CrowdfundingCampaign` contract + +Now that the `CrowdfundingCampaign` contract is adapted for contract upgradability, let's proceed to deploy +the contract so we may upgrade it in later steps. Since we've made changes to our contract we will +need to re-compile. 
+ +To compile the contracts in the project, run the following command: + +::code-group + +```bash [npm] +npm run compile +``` + +```bash [yarn] +yarn compile +``` + +```bash [pnpm] +pnpm run compile +``` + +```bash [bun] +bun run compile +``` + +:: + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 3 Solidity file +Successfully compiled 3 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +The deployment script is located at [`/deploy/deployTransparentProxy.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/deployTransparentProxy.ts). + +```typescript [deployTransparentProxy.ts] +import { getWallet } from "./utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { ethers } from "ethers"; +import { HardhatRuntimeEnvironment } from "hardhat/types"; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + const contractArtifact = await deployer.loadArtifact("CrowdfundingCampaign"); + const fundingGoalInWei = ethers.parseEther('0.1').toString(); + // Deploy the contract using a transparent proxy + const crowdfunding = await hre.zkUpgrades.deployProxy( + getWallet(), + contractArtifact, + [fundingGoalInWei], + { initializer: 'initialize' } + ); + + await crowdfunding.waitForDeployment(); +} +``` + +**Key Components:** + +- **`hre.zkUpgrades.deployProxy`**: The method call to deploy the `CrowdfundingCampaign` +contract via a transparent proxy, leveraging Hardhat's runtime environment for zkSync upgrades. +This ensures the deployed contract can be upgraded in the future without losing its state or funds. 
+- **`initializer`**: Specifies the initialization method of the contract, `initialize` in this case, +which is required for setting up the proxy's state upon deployment. + +Execute the deployment command corresponding to your package manager. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deployTransparentProxy.ts +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deployTransparentProxy.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deployTransparentProxy.ts +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deployTransparentProxy.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script deployTransparentProxy.ts +# To deploy the contract on local in-memory node: +# pnpm exec hardhat deploy-zksync --script deployTransparentProxy.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deployTransparentProxy.ts +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deployTransparentProxy.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract addresses of the implementation +contract, the admin contract, and the transparent +proxy contract. 
+ +```bash +Implementation contract was deployed to 0xE3F814fa915A75bA47230537726C99f6517Da58e +Admin was deployed to 0x05198D9f93cBDfa3e332776019115512d8e0c809 +Transparent proxy was deployed to 0x68E8533acE01019CB8D07Eca822369D5De71b74D +``` + +--- + +## Upgrade the `CrowdfundingCampaign` Contract + +With our initial setup deployed, we're ready to update our `CrowdfundingCampaign.sol` +contract by incorporating a deadline for contributions. This addition not only brings +a new layer of functionality but also introduces the concept of time-based conditions +through a `modifier`. + +**Current Contract Overview:** + +The existing version of our contract allows for open-ended contributions towards a +funding goal, without any time constraints. + +**Proposed Upgrade:** + +We're introducing a `deadline` variable, initialized at contract deployment, to establish a +clear timeframe for accepting contributions. The `withinDeadline` modifier will then enforce +this constraint, ensuring contributions are made within the allowed period. + +**Enhanced Contract:** + +The upgraded contract, [`CrowdfundingCampaignV2.sol`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaignV2.sol), +located in the `/contracts` directory, incorporates these changes: + +- **Deadline Variable:** A new state variable deadline defines the campaign's end time, +enhancing the contract with time-based logic. + +- **Initialization Logic:** An additional initialization method, `initializeV2`, sets the deadline +based on a duration provided during the upgrade. This function ensures that the upgrade is +backward-compatible and maintains the contract's integrity. + +- **Contribution Logic with Deadline:** The `contribute` method now includes a `withinDeadline` modifier, +ensuring all contributions are made within the set timeframe. 
+ +- **Deadline Enforcement:** The `withinDeadline` modifier checks the current time against the deadline, +safeguarding the contract from late contributions. + +**Deadline Extension Capability:** + +To provide flexibility, a new function allows the owner to extend the deadline, +offering adaptability to changing campaign needs. + +```solidity [CrowdfundingCampaignV2.sol] +function extendDeadline(uint256 _newDuration) public { + require(msg.sender == owner, "Only the owner can extend the deadline"); + deadline = block.timestamp + _newDuration; +} +``` + +This upgrade not only introduces the element of time to the campaign but also +exemplifies the use of [`modifiers`](https://docs.soliditylang.org/en/latest/contracts.html#function-modifiers) for enforcing contract conditions. + +### Compile contract +:display-partial{path = "_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 29 Solidity file +Successfully compiled 29 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +### Upgrade to `CrowdfundingCampaignV2` + +This section guides you through upgrading the `CrowdfundingCampaign` contract +to its second version, `CrowdfundingCampaignV2`. +Review the [`upgradeCrowdfundingCampaign.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/upgrade-scripts/upgradeCrowdfundingCampaign.ts) +script located within the `deploy/upgrade-scripts` directory to begin. + +Replace `YOUR_PROXY_ADDRESS_HERE` with the actual address of your +deployed Transparent Proxy from the previous deployment step. 
+ +```typescript [upgradeCrowdfundingCampaign.ts] +import { getWallet } from "../utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { HardhatRuntimeEnvironment } from "hardhat/types"; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + // Placeholder for the deployed proxy address + const proxyAddress = 'YOUR_PROXY_ADDRESS_HERE'; + + const contractV2Artifact = await deployer.loadArtifact('CrowdfundingCampaignV2'); + + // Upgrade the proxy to V2 + const upgradedContract = await hre.zkUpgrades.upgradeProxy(deployer.zkWallet, proxyAddress, contractV2Artifact); + + console.log('Successfully upgraded crowdfundingCampaign to crowdfundingCampaignV2'); + + upgradedContract.connect(deployer.zkWallet); + // wait some time before the next call + await new Promise((resolve) => setTimeout(resolve, 2000)); + + // Initialize V2 with a new campaign duration + const durationInSeconds = 30 * 24 * 60 * 60; // For example, setting a 30-day duration + const initTx = await upgradedContract.initializeV2(durationInSeconds); + const receipt = await initTx.wait(); + + console.log(`CrowdfundingCampaignV2 initialized. Transaction Hash: ${receipt.hash}`); +} +``` + +**Key Components:** + +- **`upgradeProxy`:** A critical method from the `hre.zkUpgrades` module that +performs the contract upgrade. It takes the wallet, the proxy address, and the +new contract artifact as arguments to transition the proxy to use the `CrowdfundingCampaignV2` logic. + +- **`initializeV2`:** Post-upgrade, this function is invoked to initialize the new +variables or logic introduced in `CrowdfundingCampaignV2`. In this example, +it sets a new campaign duration, illustrating how contract upgrades can add +functionalities without losing the existing state or funds. 
+ +Execute the command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script upgrade-scripts/upgradeCrowdfundingCampaign.ts +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script upgrade-scripts/upgradeCrowdfundingCampaign.ts +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script upgrade-scripts/upgradeCrowdfundingCampaign.ts +``` + +```bash [bun] +bun run hardhat deploy-zksync --script upgrade-scripts/upgradeCrowdfundingCampaign.ts +``` + +:: + +Upon successful deployment, you'll receive output detailing the upgrade process, +including the contract address, and transaction hash: + +```bash +Contract successfully upgraded to 0x58BD5adb462CF087E5838d53aE38A3Fe0EAf7A31 with tx 0xe30c017c52376507ab55bb51bc27eb300832dc46b8b9ac14549d2f9014cee97e +Successfully upgraded crowdfundingCampaign to crowdfundingCampaignV2 +CrowdfundingCampaignV2 initialized! 0x5adfe360187195d98d3603a82a20ffe7304cd4dec030d1bdf456fa1690879668 +Fundraising goal: 100000000000000000 +``` + +--- + +## Verify upgradable contracts + +For the verification of our upgradable contracts, it's essential to utilize the proxy address that was specified in our +upgrade script. + +To proceed with verification, execute the following command: + +Replace with the actual proxy address from your deployment. + +::code-group + +```bash [npm] +npx hardhat verify +``` + +```bash [yarn] +yarn hardhat verify +``` + +```bash [pnpm] +pnpm exec hardhat verify +``` + +```bash [bun] +bun run hardhat verify +``` + +:: + +Upon successful verification, you'll receive output detailing the verification process: + +```bash +Verifying implementation: 0x58BD5adb462CF087E5838d53aE38A3Fe0EAf7A31 +Your verification ID is: 10543 +Contract successfully verified on zkSync block explorer! +Verifying proxy: 0x68E8533acE01019CB8D07Eca822369D5De71b74D +Your verification ID is: 10544 +Contract successfully verified on zkSync block explorer! 
+Verifying proxy admin: 0x05198D9f93cBDfa3e332776019115512d8e0c809 +Your verification ID is: 10545 +Contract successfully verified on zkSync block explorer! +``` + +🎉 Congratulations! The `CrowdfundingCampaignV2` contract has been upgraded and verified! diff --git a/content/00.build/10.zksync-101/_upgrading/_transparent_proxy_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_transparent_proxy_contract_upgradability.md new file mode 100644 index 00000000..e9cfbbe3 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_transparent_proxy_contract_upgradability.md @@ -0,0 +1,33 @@ +--- +title: Transparent Upgradeable Proxy Contract +--- + +### What is a transparent upgradeable proxy contract? +Transparent upgradeable contracts utilize the proxy pattern to facilitate post-deployment +logic updates while preventing accidental function collisions. They consist of: + +1. **Proxy Contract**: Manages storage, balance, and delegates calls to the logic contract, +except for those by the admin, ensuring clear separation between user and administrative interactions. +1. **Logic Contract**: Houses the actual business logic, upgradeable by swapping out for new versions. +1. **Admin Address**: Holds the rights to upgrade the logic contract, with its commands executed +exclusively by the proxy to prevent unintended logic execution. + +This setup ensures only non-administrative calls reach the logic contract, allowing +for safe and seamless upgrades. By switching the logic contract to a newer version +while keeping the original proxy intact, the contract's state and balance are preserved. +This facilitates improvements or bug fixes without changing the proxy, maintaining a +consistent user interface. 
+ +--- + +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_upgrading/_transparent/_hardhat_transparent_contract_upgradability' +}, { + label: 'Foundry', + partial: '_upgrading/_transparent/_foundry_transparent_contract_upgradability' +}] +--- +:: diff --git a/content/00.build/10.zksync-101/_upgrading/_uups/_foundry_uups_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_uups/_foundry_uups_contract_upgradability.md new file mode 100644 index 00000000..6876ce71 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_uups/_foundry_uups_contract_upgradability.md @@ -0,0 +1,5 @@ +--- +title: Foundry | Contract Upgrade +--- + +Coming soon! diff --git a/content/00.build/10.zksync-101/_upgrading/_uups/_hardhat_uups_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_uups/_hardhat_uups_contract_upgradability.md new file mode 100644 index 00000000..2ab74333 --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_uups/_hardhat_uups_contract_upgradability.md @@ -0,0 +1,405 @@ +--- +title: Hardhat | Contract Upgrading +--- + +Run the following command in your terminal to initialize the project. + +```sh +npx zksync-cli@latest create --template qs-upgrade contract-upgrade-quickstart +cd contract-upgrade-quickstart +``` + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +If you encounter an error while installing project dependencies using NPM as your package manager, try running `npm install --force`. +:: + +## Set up your wallet + +:display-partial{path="build/zksync-101/_partials/_setup-wallet"} + +--- + +## Adapt the `CrowdfundingCampaign.sol` for UUPS Upgradability + +To align the `CrowdfundingCampaign.sol` contract with UUPS (Universal Upgradeable Proxy Standard) upgradability, +we're integrating OpenZeppelin's UUPSUpgradeable contracts. This method offers a more secure and gas-efficient +approach to contract upgrades by embedding the upgrade logic within the contract itself. 
+ +### Refactoring for UUPS Compatibility + +We've refactored the contract to support UUPS upgradability, ensuring the contract's logic +is upgradeable while maintaining a persistent state. This is achieved by utilizing initializer +functions and the UUPS upgrade mechanism. + +**UUPS-Enabled Contract Structure:** + +```solidity [CrowdfundingCampaign_UUPS.sol] +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +// Import UUPS from OpenZeppelin +import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; +import "@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol"; +import "@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol"; + +contract CrowdfundingCampaign_UUPS is Initializable, UUPSUpgradeable, OwnableUpgradeable { + uint256 public fundingGoal; + uint256 public totalFundsRaised; + mapping(address => uint256) public contributions; + + event ContributionReceived(address contributor, uint256 amount); + event GoalReached(uint256 totalFundsRaised); + + // Initializer function, replaces constructor for upgradeable contracts + function initialize(uint256 _fundingGoal) public initializer { + __Ownable_init(); // Initialize ownership to the deployer + __UUPSUpgradeable_init(); // Initialize UUPS upgradeability + + fundingGoal = _fundingGoal; + } + + function contribute() public payable { + // Contribution logic remains the same + } + + function withdrawFunds() public onlyOwner { + // WithdrawFunds logic remains the same + } + + function getTotalFundsRaised() public view returns (uint256) { + // getTotalFundsRaised remains the same + } + + function getFundingGoal() public view returns (uint256) { + // getFundingGoal remains the same + } + + // Ensure only the owner can upgrade the contract + function _authorizeUpgrade(address newImplementation) internal override onlyOwner {} +} +``` + +**Key Adaptations:** + +- **Initializable & UUPSUpgradeable**: The contract inherits from `Initializable` and `UUPSUpgradeable`, 
+ensuring initialization follows the proxy pattern and enabling the UUPS upgrade mechanism. +- **OwnableUpgradeable**: Utilizes `OwnableUpgradeable` to manage ownership through an initializer, +important for secure upgrade authorization. +- **_authorizeUpgrade**: A safeguard function ensuring only the contract owner can perform upgrades, +reinforcing the contract's security. + +By adopting the UUPS pattern, the [`CrowdfundingCampaign_UUPS`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaign_UUPS.sol) +contract becomes efficiently upgradeable, offering enhanced security and reduced gas costs, setting a solid foundation for future enhancements. + +--- + +## Compile the `CrowdfundingCampaign_UUPS` contract + +Now that the `CrowdfundingCampaign_UUPS` contract is adapted for contract upgradability, let's proceed to deploy +the contract so we may upgrade it in later steps. Since we've made changes to our contract we will +need to re-compile. + +To compile the contracts in the project, run the following command: + +::code-group + +```bash [npm] +npm run compile +``` + +```bash [yarn] +yarn compile +``` + +```bash [pnpm] +pnpm run compile +``` + +```bash [bun] +bun run compile +``` + +:: + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 4 Solidity file +Successfully compiled 4 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +## Deploy the updated contract + +The script to deploy the `CrowdfundingCampaign_UUPS` contract is located at [`/deploy/deployUUPS.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/deployUUPS.ts). 
+ +```typescript [deployUUPS.ts] +import { getWallet } from "./utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { ethers } from "ethers"; +import { HardhatRuntimeEnvironment } from "hardhat/types"; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + const contractArtifact = await deployer.loadArtifact("CrowdfundingCampaign_UUPS"); + const fundingGoalInWei = ethers.parseEther('0.1').toString(); + + const crowdfunding = await hre.zkUpgrades.deployProxy( + getWallet(), + contractArtifact, + [fundingGoalInWei], + { initializer: 'initialize' } + ); + + await crowdfunding.waitForDeployment(); +} +``` + +**Key Components:** + +- **`deployProxy` Method:** This method is responsible for deploying the `CrowdfundingCampaign` +contract as a UUPS upgradeable contract. It initializes the contract with the specified parameters, +such as the `fundingGoalInWei`, ensuring that the contract is ready for immediate use after deployment. +The use of the UUPS pattern provides a secure and efficient mechanism for future upgrades. + +- **`initializer` Option:** Specifies the initialization method of the contract, in this case, `initialize`. +This is used for setting up the initial state of the contract upon deployment, particularly important +for upgradeable contracts where constructor usage is not possible. + +Execute the deployment command corresponding to your package manager. The default command +deploys to the configured network in your Hardhat setup. For local deployment, append +`--network inMemoryNode` to deploy to the local in-memory node running. 
+ +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script deployUUPS.ts +# To deploy the contract on local in-memory node: +# npx hardhat deploy-zksync --script deployUUPS.ts --network inMemoryNode +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script deployUUPS.ts +# To deploy the contract on local in-memory node: +# yarn hardhat deploy-zksync --script deployUUPS.ts --network inMemoryNode +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script deployUUPS.ts +# To deploy the contract on local in-memory node: +# pnpm exec hardhat deploy-zksync --script deployUUPS.ts --network inMemoryNode +``` + +```bash [bun] +bun run hardhat deploy-zksync --script deployUUPS.ts +# To deploy the contract on local in-memory node: +# bun run hardhat deploy-zksync --script deployUUPS.ts --network inMemoryNode +``` + +:: + +Upon successful deployment, you'll receive output detailing the deployment process, +including the contract addresses of the implementation +contract, the admin contract, and the transparent +proxy contract. + +```bash +Implementation contract was deployed to 0xF0De77041F3cF6D9C905A10ce59858b17E57E3B9 +UUPS proxy was deployed to 0x56882194aAe8E4B6d18cD84e4D7B0F807e0100Cb +``` + +--- + +## Upgrade to the `CrowdfundingCampaignV2_UUPS` Contract + +With our initial setup deployed, we're ready to upgrade our `CrowdfundingCampaign_UUPS.sol` +contract by incorporating a deadline for contributions. This addition not only brings +a new layer of functionality but also introduces the concept of time-based conditions +through a `modifier`. + +**Current Contract Overview:** + +The existing version of our contract allows for open-ended contributions towards a +funding goal, without any time constraints. + +**Proposed Upgrade:** + +We're introducing a `deadline` variable, initialized at contract deployment, to establish a +clear timeframe for accepting contributions. 
The `withinDeadline` modifier will then enforce +this constraint, ensuring contributions are made within the allowed period. + +**Enhanced Contract:** + +The upgraded contract, [`CrowdfundingCampaignV2_UUPS.sol`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/contracts/CrowdfundingCampaignV2_UUPS.sol), +located in the `/contracts` directory, incorporates these changes: + +- **Deadline Variable:** A new state variable deadline defines the campaign's end time, +enhancing the contract with time-based logic. + +- **Initialization Logic:** An additional initialization method, `initializeV2`, sets the deadline +based on a duration provided during the upgrade. This function ensures that the upgrade is +backward-compatible and maintains the contract's integrity. + +- **Contribution Logic with Deadline:** The `contribute` method now includes a `withinDeadline` modifier, +ensuring all contributions are made within the set timeframe. + +- **Deadline Enforcement:** The `withinDeadline` modifier checks the current time against the deadline, +safeguarding the contract from late contributions. + +**Deadline Extension Capability:** + +To provide flexibility, a new function allows the owner to extend the deadline, +offering adaptability to changing campaign needs. + +```solidity [CrowdfundingCampaignV2_UUPS.sol] +function extendDeadline(uint256 _newDuration) public { + require(msg.sender == owner, "Only the owner can extend the deadline"); + deadline = block.timestamp + _newDuration; +} +``` + +This upgrade not only introduces the element of time to the campaign but also +exemplifies the use of `modifiers` for enforcing contract conditions. 
+ +### Compile the `CrowdfundingCampaignV2_UUPS` contract + +:display-partial{path = "_partials/_compile-solidity-contracts"} + +Upon successful compilation, you'll receive output detailing the +`zksolc` and `solc` versions used during compiling and the number +of Solidity files compiled. + +```bash +Compiling contracts for zkSync Era with zksolc v1.4.0 and solc v0.8.17 +Compiling 4 Solidity file +Successfully compiled 4 Solidity file +``` + +The compiled artifacts will be located in the `/artifacts-zk` folder. + +### Upgrade to `CrowdfundingCampaignV2_UUPS` + +This section describes the initiating the upgrade to `CrowdfundingCampaignV2_UUPS.sol` contract. +Let's start by reviewing the [`upgradeUUPSCrowdfundingCampaign.ts`](https://github.com/matter-labs/zksync-contract-templates/blob/main/templates/quickstart/hardhat/upgradability/deploy/upgrade-scripts/upgradeUUPSCrowdfundingCampaign.ts) +script in the `deploy/upgrade-scripts` directory: + +Replace `YOUR_PROXY_ADDRESS_HERE` with the actual address of your +deployed Transparent Proxy from the previous deployment step. 
+ +```typescript [upgradeUUPSCrowdfundingCampaign.ts] +import { getWallet } from "../utils"; +import { Deployer } from '@matterlabs/hardhat-zksync'; +import { HardhatRuntimeEnvironment } from "hardhat/types"; + +export default async function (hre: HardhatRuntimeEnvironment) { + const wallet = getWallet(); + const deployer = new Deployer(hre, wallet); + + // Placeholder for the deployed proxy address + const proxyAddress = 'YOUR_PROXY_ADDRESS_HERE'; + + // Upgrade the proxy to V2 + const contractV2Artifact = await deployer.loadArtifact('CrowdfundingCampaignV2_UUPS'); + const upgradedContract = await hre.zkUpgrades.upgradeProxy(deployer.zkWallet, proxyAddress, contractV2Artifact); + console.log('Successfully upgraded crowdfundingCampaign_UUPS to crowdfundingCampaignV2_UUPS'); + + upgradedContract.connect(deployer.zkWallet); + // wait some time before the next call + await new Promise((resolve) => setTimeout(resolve, 2000)); + + const durationInSeconds = 30 * 24 * 60 * 60; // For example, setting a 30-day duration + + const initTx = await upgradedContract.initializeV2(durationInSeconds); + const receipt = await initTx.wait(); + + console.log('CrowdfundingCampaignV2_UUPS initialized!', receipt.hash); +} +``` + +**Key Components:** + +- **`upgradeProxy`:** A critical method from the `hre.zkUpgrades` module that +performs the contract upgrade. It takes the wallet, the proxy address, and the +new contract artifact as arguments to transition the proxy to use the `CrowdfundingCampaignV2_UUPS` logic. + +- **`initializeV2`:** Post-upgrade, this function is invoked to initialize the new +variables or logic introduced in `CrowdfundingCampaignV2_UUPS`. In this example, +it sets a new campaign duration, illustrating how contract upgrades can add +functionalities without losing the existing state or funds. 
+ +Execute the test command corresponding to your package manager: + +::code-group + +```bash [npm] +npx hardhat deploy-zksync --script upgrade-scripts/upgradeUUPSCrowdfundingCampaign.ts +``` + +```bash [yarn] +yarn hardhat deploy-zksync --script upgrade-scripts/upgradeUUPSCrowdfundingCampaign.ts +``` + +```bash [pnpm] +pnpm exec hardhat deploy-zksync --script upgrade-scripts/upgradeUUPSCrowdfundingCampaign.ts +``` + +```bash [bun] +bun run hardhat deploy-zksync --script upgrade-scripts/upgradeUUPSCrowdfundingCampaign.ts +``` + +:: + +Upon successful deployment, you'll receive output detailing the upgrade process, +including the new beacon address, and transaction hash: + +```bash +Contract successfully upgraded to 0x9BE22706966D717d7b0C8aEC99A1a9d1b3bFeC50 with tx 0x24ad582828b23b98d207ec7c057cd6a9c911bea22dbe85e0affd7479b00d90e9 +Successfully upgraded crowdfundingCampaign_UUPS to crowdfundingCampaignV2_UUPS +CrowdfundingCampaignV2_UUPS initialized! 0xab959f588b64dc6dee1e94d5fa0da2ae205c7438cf097d26d3ba73690e2b09e8 +``` + +--- + +## Verify upgradable contracts + +To verify our upgradable contracts we need to the proxy address we previously used in our upgrade script. +With that execute the following command: + +::code-group + +```bash [npm] +npx hardhat verify +``` + +```bash [yarn] +yarn hardhat verify +``` + +```bash [pnpm] +pnpm exec hardhat verify +``` + +```bash [bun] +bun run hardhat verify +``` + +:: + +Upon successful verification, you'll receive output detailing the verification process: + +```bash +Verifying implementation: 0x9BE22706966D717d7b0C8aEC99A1a9d1b3bFeC50 +Your verification ID is: 10618 +Contract successfully verified on zkSync block explorer! +Verifying proxy: 0x91921fDb0F8942c18eCeE4E3896b369ca0650483 +Your verification ID is: 10619 +Contract successfully verified on zkSync block explorer! +``` + +🎉 Congratulations! The `CrowdfundingCampaignV2_UUPS` contract has been upgraded and verified! 
diff --git a/content/00.build/10.zksync-101/_upgrading/_uups_contract_upgradability.md b/content/00.build/10.zksync-101/_upgrading/_uups_contract_upgradability.md new file mode 100644 index 00000000..44c6049a --- /dev/null +++ b/content/00.build/10.zksync-101/_upgrading/_uups_contract_upgradability.md @@ -0,0 +1,35 @@ +--- +title: UUPS Proxy Contract Upgradeability +--- + +### What is a UUPS upgradeable contract? +UUPS (Universal Upgradeable Proxy Standard) Upgradeable Contracts embed the upgrade logic +within the contract itself, simplifying upgrades and enhancing security. The components are: + +1. **Proxy Contract**: Contains minimal logic, primarily delegating calls to the implementation +contract. Unlike other proxies, it doesn't require a separate upgrade function. +1. **Implementation Contract**: Houses the business logic and the upgrade functionality, +enabling the contract to upgrade itself from within. +1. **Admin Role**: Assigned to an entity with the authority to initiate upgrades, ensuring +controlled access to the upgrade function. + +In UUPS contracts, upgrades are performed by invoking the upgrade function within the +implementation contract, which updates the proxy's reference to point to a new implementation. +This self-contained approach minimizes the proxy's complexity and gas costs, while the +implementation contract's built-in upgrade mechanism ensures only authorized upgrades. +The contract's state remains intact across upgrades, facilitating continuous improvement +with a stable user experience. 
+ +--- + +::content-switcher +--- +items: [{ + label: 'Hardhat', + partial: '_upgrading/_uups/_hardhat_uups_contract_upgradability' +}, { + label: 'Foundry', + partial: '_upgrading/_uups/_foundry_uups_contract_upgradability' +}] +--- +:: diff --git a/content/00.build/40.tooling/00.zksync-block-explorers.md b/content/00.build/40.tooling/00.zksync-block-explorers.md new file mode 100644 index 00000000..f0c9578b --- /dev/null +++ b/content/00.build/40.tooling/00.zksync-block-explorers.md @@ -0,0 +1,62 @@ +--- +title: Block Explorers +description: Learn about the official and 3rd party resources for exploring the zkSync Era network. +--- + +The [zkSync Era Block Explorer](%%zk_mainnet_block_explorer_url%%) +details comprehensive data about transactions, blocks, batches, wallets, tokens, and smart contracts on the zkSync Era network. + +## Block Explorer API + +We’ve developed the zkSync Era Block Explorer API for developers to access zkSync Era Block Explorer data directly via HTTP requests. + +- [Mainnet Block Explorer API](https://block-explorer-api.mainnet.zksync.io/docs) +- [Testnet Block Explorer API](https://block-explorer-api.sepolia.zksync.dev/docs) + +The API provides various endpoints for many use cases you might want in your app. +It is compatible with [Etherscan API](https://docs.etherscan.io/), +which makes it easy to transition your existing apps to zkSync Era network. + +Feel free to contribute and create issues and feature requests in [zkSync Era Block Explorer GitHub repo](%%zk_git_repo_block-explorer%%). + +## Other block explorers + +A full list of zkSync block explorers can be found on the zkSync website's [Block Explorers page](https://zksync.io/explore#explorers). + +### Etherscan - zkSync Era Explorer + +Etherscan allows you to explore and search the zkSync Era network +for transactions, addresses, tokens, prices and other activities taking place on the Network. 
+ +- [Etherscan Mainnet](https://era.zksync.network/) +- [Etherscan Testnet](https://sepolia-era.zksync.network/) + +### L2Scan + +L2Scan is the open source block explorer for zkSync by the Unifra team + +- [L2Scan Mainnet](https://zksync-era.l2scan.co/) +- [L2Scan Testnet](https://zksync-era-sepolia.l2scan.co/) + +### Blockscout + +Blockscout is a blockchain explorer for inspecting, analyzing, and interacting with zkSync. + +- [Blockscout Mainnet](https://zksync.blockscout.com/) +- [Blockscout Testnet](https://zksync-sepolia.blockscout.com/) + +### Hyperscan + +Routescan's zkSync Explorer allows you to explore and search for transactions, addresses, tokens, prices and other activities taking place on zkSync. + +- [Hyperscan](https://hyperscan.xyz/) + +### OKLink + +[OKLink](https://www.oklink.com/zksync) provides a familiar UI with data on transactions, blocks, account balances and more. + +### NFTScan + +[NFT Explorer](https://zksync.nftscan.com/) provides developers and users +with professional NFT asset data search and query functions, including: NFT Collection, NFT Contract, Wallet Address, +NFT TxHash and other multi-dimensional data search queries. diff --git a/content/00.build/40.tooling/10.zksync-cli/00.index.md b/content/00.build/40.tooling/10.zksync-cli/00.index.md new file mode 100644 index 00000000..b3a8a4e9 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/00.index.md @@ -0,0 +1,51 @@ +--- +title: Getting Started +description: Learn how to use the powerful zkSync CLI tool for local development. +--- + +The zkSync Command Line Interface (CLI) is a powerful tool designed to simplify the development and interaction with zkSync from a command shell. + +## Usage + +You can run commands locally without installation using the following command: `npx zksync-cli`. For example: `npx zksync-cli dev start`. 
+ +### Install zksync-cli + +You can alternatively install the CLI globally using the npm package manager: + +```bash +npm install -g zksync-cli +``` + +::callout{icon="i-heroicons-light-bulb" color="blue"} +If you go the route of installing the zksync-cli package locally, we recommend using the global option. +:: + +### Update zksync-cli + +If you installed `zksync-cli` via the npm package manager, you can update with the following: + +```bash +npm update -g zksync-cli +``` + +## Available Commands + +- [`dev`](zksync-cli/zksync-cli-dev): Start a local development environment with zkSync and Ethereum nodes. +- [`create`](zksync-cli/zksync-cli-create): Scaffold new projects using templates for frontend, contracts, and scripting. +- [`contract`](zksync-cli/zksync-cli-contract): Read and write data to zkSync contracts without building UI. +- [`transaction`](zksync-cli/zksync-cli-transaction): Fetch and display detailed information about a specific transaction. +- [`wallet`](zksync-cli/zksync-cli-wallet): Manage zkSync wallet assets, including transfers and balance checks. +- [`bridge`](zksync-cli/zksync-cli-bridge): Perform deposits and withdrawals between Ethereum and zkSync. +- [`config chains`](zksync-cli/zksync-cli-config-chains): Add or edit custom chains for flexible testing and development. + +## Further Assistance + +Need help? Join our [GitHub Discussions](%%zk_git_repo_zksync-developers%%/discussions/) +to ask questions, share your experiences, and connect with the zkSync community. + +## Source Code + +The [zkSync CLI project](%%zk_git_repo_zksync-cli%%) +is open-source and available on GitHub under the MIT License. +Feel free to contribute, report issues, or suggest new features to help us improve the tool for everyone. 
diff --git a/content/00.build/40.tooling/10.zksync-cli/01.troubleshooting.md b/content/00.build/40.tooling/10.zksync-cli/01.troubleshooting.md new file mode 100644 index 00000000..01923afe --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/01.troubleshooting.md @@ -0,0 +1,68 @@ +--- +title: Troubleshooting +description: Get help with issues related to zksync-cli. +--- + +Encountering issues with zkSync CLI? Here are some common problems and step-by-step recommendations for resolving them: + +## `command not found: zksync-cli` + +If you try to use the `zksync-cli` command and receive the `command not found` message, double check if you +have the package installed locally or were using the `npx zksync-cli` command. + +--- + +## `unknown command` Error + +If you encounter an `unknown command` error, follow these steps: + +a. **Check the zkSync CLI Version** + +- Run `zksync-cli --version` to check your current version. +- Compare it with the latest version available on [npm](https://www.npmjs.com/package/zksync-cli). +- If your version is lower than the one on npm, follow the steps below. +If your version is up-to-date, it's possible that the command was moved or renamed. +Use `zksync-cli help` for a list of current commands or refer to the documentation. + +b. **Verify Local Installation** + +- Use `npm list zksync-cli` to check if `zksync-cli` is installed in the current directory or any parent directories from where you are running your terminal. +- If it is indeed installed, make sure to uninstall it by running `npm uninstall zksync-cli` in its installation location. +Remove all instances of `zksync-cli` until none are found by `npm list zksync-cli`. + +c. **Verify Global Installation** + +- Use `npm list -g zksync-cli` to check if `zksync-cli` is installed globally. +- If it is installed globally, uninstall it using `npm uninstall -g zksync-cli`. + +d. **Clean npm Cache** + +- Run `npm cache clean --force`. + +e. 
**Use the Latest Version** + +- As a quick fix, or if the above steps don't resolve the issue, use `npx zksync-cli@latest [command]`, for example, `npx zksync-cli@latest dev start`. + +--- + +## My version is outdated + +If `zksync-cli` is not running the latest version: + +- Refer to the [instructions for `unknown command` Error](/build/tooling/zksync-cli/troubleshooting#unknown-command-error) +above to check and update your zkSync CLI version. + +--- + +## `command not found: npx` Error + +If you receive a `command not found: npx` error, it means Node.js is not installed or not correctly set up on your system: + +- Install Node.js from [the official website](https://nodejs.org/). This will also install `npm` and `npx`. +- After installation, restart your terminal and try running `npx zksync-cli` again. + +--- + +## Other issues + +For all other issues, we encourage you to seek help or report them in our [GitHub Discussions](%%zk_git_repo_zksync-developers%%/discussions/new?category=general&title=[zksync-cli]%20). diff --git a/content/00.build/40.tooling/10.zksync-cli/10.zksync-cli-dev.md b/content/00.build/40.tooling/10.zksync-cli/10.zksync-cli-dev.md new file mode 100644 index 00000000..5ff8e7a4 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/10.zksync-cli-dev.md @@ -0,0 +1,52 @@ +--- +title: zksync-cli dev +description: Manage a local node with zksync-cli. +--- + +Utilize `zksync-cli` to effortlessly initiate a local development environment. +Using the command, `zksync-cli dev start`, you can spin up local zkSync and Ethereum nodes, along with Block Explorer, Wallet, and Bridge +for a seamless development experience. + +## Prerequisites + +Before beginning, ensure you have installed: + +- [Node.js](https://nodejs.org) v18+ +- [Git](https://git-scm.com/downloads) +- [Docker](https://www.docker.com/get-started/) + +## Starting and Stopping Nodes + +- **Start**: `zksync-cli dev start` initiates your local environment. 
On the first run, it prompts to select a node and additional modules. +- **Stop**: `zksync-cli dev stop` terminates the local environment. Use `zksync-cli dev stop [module name]` to stop specific modules. +- **Restart**: `zksync-cli dev restart` or `zksync-cli dev restart [module name]` restarts your environment or specific modules. + +## Configuring Your Environment + +- `zksync-cli dev config` allows the selection of nodes and additional modules like block explorer and bridge. +Run modules such as Block Explorer against an already running node by adding a new chain. + +## Managing Modules + +- `zksync-cli dev modules` lists all installed modules, providing a clear overview of your environment's components. + +## Viewing Logs + +- `zksync-cli dev logs` displays logs for all active modules, essential for monitoring and debugging. + +## Cleaning Modules + +- `zksync-cli dev clean` removes all module data from your computer. For specific modules, use `zksync-cli dev clean [module name]`. + +## Updating Modules + +- `zksync-cli dev update [module name]` updates individual modules, ensuring you're running the latest versions. + +## Troubleshooting + +If modules malfunction, e.g., failing to start: + +1. Use `zksync-cli dev stop` to cease all operations. +2. Reinstall the problematic module with `zksync-cli dev clean [module name]`. +3. Restart with `zksync-cli dev start`. Check Docker container logs for detailed errors, accessible through Docker Desktop. +4. Persisting issues? Please report them in our [GitHub discussions](%%zk_git_repo_zksync-developers%%/discussions/new?category=general&title=[zksync-cli]%20<Title>). 
diff --git a/content/00.build/40.tooling/10.zksync-cli/20.zksync-cli-create.md b/content/00.build/40.tooling/10.zksync-cli/20.zksync-cli-create.md new file mode 100644 index 00000000..c8ed4a25 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/20.zksync-cli-create.md @@ -0,0 +1,48 @@ +--- +title: zksync-cli create +description: Use the zksync-cli create command to streamline project setup. +--- + +The `zksync-cli create` command streamlines project setup by offering templates for frontend development, smart contracts, +and scripting for zkSync, enabling rapid deployment and development. + +### Prerequisites + +Ensure you have the following installed before you start: + +- [Node.js](https://nodejs.org) v18+ +- [Git](https://git-scm.com/downloads) + +## Available Templates + +`zksync-cli create` simplifies the initial project setup by providing templates in three main categories: + +### Frontend + +Fast-track your UI development with our frontend templates, supporting popular frameworks like Vue, React, Next.js, Nuxt, and Vite. +Options include viem, ethers, web3modal, rainbowkit, and more, equipping you with the necessary tools for dApp development. + +zkSync Frontend Templates repo can be found [here](%%zk_git_repo_zksync-frontend-templates%%#readme) + +### Contracts + +For smart contract development, choose from templates designed for quick deployment and testing, compatible with Solidity or Vyper. +Utilize tools like Hardhat to streamline your workflow. + +zkSync Contract Templates repo can be found [here](%%zk_git_repo_zksync-contract-templates%%#readme) + +### Scripting + +Enhance your project with Node.js scripting templates for automated interactions and advanced zkSync operations. +Includes examples of wallet or contract interactions using viem, ethers, or web3.js. 
+ +zkSync Scripting Examples repo can be found [here](%%zk_git_repo_zksync-scripting-templates%%#readme) + +## Using Templates + +To create a project using a template, run the following command and follow the prompts to select your desired template category +and specific framework or tool: + +```bash +zksync-cli create +``` diff --git a/content/00.build/40.tooling/10.zksync-cli/30.zksync-cli-contract.md b/content/00.build/40.tooling/10.zksync-cli/30.zksync-cli-contract.md new file mode 100644 index 00000000..11cd05d3 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/30.zksync-cli-contract.md @@ -0,0 +1,245 @@ +--- +title: zksync-cli contract +description: Interact with contracts using the zksync-cli contract command. +--- + +The `zksync-cli contract` command comes with actions to read, write and encode smart contracts on zkSync. +These commands automate tasks such as method verification, ABI handling, output decoding, and proxy contract processing. + +::callout{icon="i-heroicons-light-bulb" color="blue"} +After running a command with prompts you will see a full command with all the options that you can copy +and use later to quickly run the same command again. +:: + +## Contract Read + +The `zksync-cli contract read` command executes read-only methods on contracts, like checking ERC-20 balances or contract states. + +### Read Options + +You do not need to specify options below, you will be prompted to enter them if they are not specified. + +- `--chain <chain-name>`: Select the chain to use (e.g., `zksync-mainnet`, `zksync-sepolia`). 
+- `--rpc <url>`: Provide RPC URL instead of selecting a chain +- `--contract <address>`: Specify contract's address +- `--method <method-signature>`: Defines the contract method to interact with +- `--arguments <method-arguments...>`: Pass arguments to the contract method +- `--data <0x-transaction-data>`: Instead of specifying the method and arguments, you can pass the raw transaction data +- `--outputTypes <output-types...>`: Specifies output types for decoding +- `--from <address>`: Call method on behalf of specified address +- `--abi <path>`: Path to local ABI file or contract artifact +- `--decode-skip`: Skips prompting for output types and decoding the response +- `--show-info`: Displays transaction request information (e.g. encoded transaction data) + +### Basic read example + +```bash +zksync-cli contract read +``` + +1. You will be prompted to select a chain, contract address, and method. + + ```bash + ? Chain to use: %%zk_testnet_name%% + ? Contract address: 0x45E6dC995113fd3d1A3b1964493105B9AA9a9A42 + ``` + +1. Next you need to select a **method (function) to call**. + + - In case your contract is verified it will automatically identify the ABI: + + ```bash + ? Contract method to call + ────────── Provided contract ────────── + ❯ balanceOf(address account) view returns (uint256) + decimals() pure returns (uint8) + name() pure returns (string) + symbol() pure returns (string) + totalSupply() view returns (uint256) + ─────────────────────────────────────── + Type method manually + ``` + + - Otherwise you'll have to enter method signature manually. + + ```bash + ? Enter method to call: balanceOf(address) + ``` + + - Alternatively, you can specify the ABI file manually using the `--abi` option. [See example](#using-a-local-abi-file) + +1. You will be prompted to enter **arguments** for the method, one by one. + + ```bash + ? Provide method arguments: + ? 
[1/1] account (address): 0xa1cf087DB965Ab02Fb3CFaCe1f5c63935815f044 + ``` + + When submitted a contract call will be made and you'll see the response in it's original encoded form. + + ```bash + ✔ Method response (raw): 0x000000000000000000000000000000000000000000010508e606548a9e5d2000 + ``` + +1. you will be asked the **method output** type to decode the response. +You can skip this step by submitting empty response or completely skip it by passing `--decode-skip` option. + + ```bash + ? Output types: uint256 + ✔ Decoded method response: 1232701801010000000000000 + ``` + +### Running read on behalf of another address + +You can specify the `--from` option to run the method on behalf of another address. +This is useful when you need to call a method that expects a specific address as `msg.sender`. + +```bash +zksync-cli contract read \ + --from "0xa1cf087DB965Ab02Fb3CFaCe1f5c63935815f044" +``` + +## Contract Write + +The `zksync-cli contract write` command performs write operations on smart contracts. +It enables sending transactions that alter the state of a contract, such as transferring tokens or changing ownership. + +### Write Options + +You do not need to specify options below, you will be prompted to enter them if they are not specified. + +- `--chain <chain-name>`: Select the chain to use +- `--rpc <url>`: Provide RPC URL instead of selecting a chain +- `--contract <address>`: Specify contract's address +- `--method <method-signature>`: Defines the contract method to interact with +- `--arguments <method-arguments...>`: Pass arguments to the contract method +- `--value <ether-amount>`: Ether amount to send with the transaction (e.g. 
0.01) +- `--private-key <wallet-private-key>`: Private key of the wallet to use to sign the transaction +- `--data <0x-transaction-data>`: Instead of specifying the method and arguments, you can pass the raw transaction data +- `--abi <path>`: Path to local ABI file or contract artifact +- `--show-info`: Displays transaction request information (e.g. encoded transaction data) + +### Basic write example + +```bash +zksync-cli contract write +``` + +1. You will be prompted to select a chain, contract address, and method. + + ```bash + + ? Chain to use: %%zk_testnet_name%% + ? Contract address: 0x45E6dC995113fd3d1A3b1964493105B9AA9a9A42 + ``` + +1. Select a **method (function) to call**. + + - In case your contract is verified it will automatically identify the ABI: + + ```bash + ? Contract method to call + ────────── Provided contract ────────── + ❯ approve(address spender, uint256 amount) returns (bool) + transfer(address to, uint256 amount) returns (bool) + ─────────────────────────────────────── + Type method manually + ``` + + - Otherwise you'll have to enter method signature manually, for example `transfer(address,uint256)`. + + ```bash + ? Enter method to call: transfer(address,uint256) + ``` + + - Alternatively, you can specify the ABI file manually using the `--abi` option. [See example](#using-a-local-abi-file) + +1. You will be prompted to enter **arguments** for the method, one by one. + + ```bash + ? Provide method arguments: + ? [1/2] to (address): 0xa1cf087DB965Ab02Fb3CFaCe1f5c63935815f044 + ? [2/2] amount (uint256): 1 + ``` + +1. provide private key of the wallet to use to sign the transaction. + + ```bash + ? Private key of the wallet to sign transaction: ***** + ``` + +When submitted a contract call will be made and you'll see the transaction hash. + +```bash +✔ Transaction submitted. 
Transaction hash: 0xa83ad7e8932e18cdc57d3892040505a50d560a56fa507cabcd4180e9e5898bec +``` + +## Encode + +The `zksync-cli contract encode` command will get calldata (e.g. 0x1234) from contract method signature and arguments. + +### Encode options + +- `--method <contractMethod(arguments)>`: Contract method to interact with +- `--args, --arguments <arguments...>`: Arguments +- `--abi <path/to/abi>`: Contract artifact or ABI file location. + +### Basic encode example + +```bash +zksync-cli contract encode +``` + +1. Select a **method (function) to encode**. + + - Enter method signature manually, for example `transfer(address,uint256)`. + + ```bash + ? Enter method to call: transfer(address,uint256) + ``` + + - Alternatively, you can specify the ABI file using the `--abi` option. [See example](#using-a-local-abi-file) + + ```bash + ? Contract method to call + ────────── Provided contract ────────── + ❯ approve(address spender, uint256 amount) returns (bool) + transfer(address to, uint256 amount) returns (bool) + ─────────────────────────────────────── + Type method manually + ``` + +1. Enter the **arguments** for the method, one by one. + + ```bash + ? Provide method arguments: + ? [1/2] to (address): 0xa1cf087DB965Ab02Fb3CFaCe1f5c63935815f044 + ? [2/2] amount (uint256): 1 + ``` + +When finished you will see the encoded data. + +```bash +✔ Encoded data: 0xa41368620000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c48656c6c6f20776f726c64210000000000000000000000000000000000000000 +``` + +## Using a local ABI file + +You can specify a local ABI file using the `--abi` option. +It should be a JSON file with either ABI data (array) or contract artifact which you get after compiling your contracts. 
+ +```bash +zksync-cli contract read \ + --abi "./Greeter.json" +``` + +You will be prompted to select a method (function): + +```bash + ────────── Provided contract ────────── +❯ greet() view returns (string) + ─────────────────────────────────────── + Type method manually +``` + +Response will be decoded automatically according to the ABI file. diff --git a/content/00.build/40.tooling/10.zksync-cli/40.zksync-cli-transaction.md b/content/00.build/40.tooling/10.zksync-cli/40.zksync-cli-transaction.md new file mode 100644 index 00000000..f1fe62a5 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/40.zksync-cli-transaction.md @@ -0,0 +1,87 @@ +--- +title: zksync-cli transaction +description: Interact with transaction information using zksync-cli. +--- + +The `zksync-cli transaction info` command is designed to fetch and display detailed information about a specific transaction. +It can be used to check the status, amounts transferred, fees, method signatures, and arguments of transactions. + +## Options + +If no options are provided directly, the CLI will prompt the user to enter the necessary information, such as the chain and transaction hash. + +- `--tx <transaction hash>`: Specify the transaction hash to query. +- `--chain <chain-name>`: Select the chain to use (e.g., `zksync-mainnet`, `zksync-sepolia`). +- `--rpc <url>`: Provide RPC URL instead of selecting a chain +- `--full`: Show all available transaction data for comprehensive insights. +- `--raw`: Display the raw JSON response from the node. +- `--abi <path>`: Path to a local ABI file to decode the transaction's input data. + +## Examples + +### Basic usage + +```bash +zksync-cli transaction info +``` + +You will be prompted to select a chain and transaction hash. + +```bash +? Chain to use: %%zk_testnet_name%% +? 
Transaction hash: 0x2547ce8219eb7ed5d73e68673b0e4ded83afc732a6c651d43d9dc49bb2f13d40 +``` + +The command will display detailed information about the transaction: + +```bash +──────────────────── Main info ──────────────────── +Transaction hash: 0x2547ce8219eb7ed5d73e68673b0e4ded83afc732a6c651d43d9dc49bb2f13d40 +Status: completed +From: 0x56DDd604011c5F8629bd7C2472E3504Bd32c269b +To: 0xBB5c309A3a9347c0135B93CbD53D394Aa84345E5 +Value: 0 ETH +Fee: 0.0001503581 ETH | Initial: 0.0004 ETH Refunded: 0.0038496419 ETH +Method: transmit(bytes,bytes32[],bytes32[],bytes32) 0xc9807539 + +───────────────── Method arguments ───────────────── +[1] bytes: 0x0000000000000000000000fd69e45d6f51e482ac4f8f2e14f2155200008b5f010001020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000007df298c81a0000000000000000000000000000000000000000000000000000007df298c81a0000000000000000000000000000000000000000000000000000007df298c81a +[2] bytes32[]: 0xd737d65b6b610c3f330bcfddbfc08e46d2a628c88bf22ec0d8f25627a3330798,0x9d33be2ba33b731555c13a4e7bf02d3d576fa3115f7523cbf07732321c85cdba +[3] bytes32[]: 0x73d639deda36b781ae049c8eceafba4196ee8ecc1efb74c538a28ea762ff6658,0x37ac79ff2ca902140613b0e51357d8fb218a67b4736bdee0c268c5fd9812e146 +[4] bytes32: 0x0101000000000000000000000000000000000000000000000000000000000000 + +───────────────────── Details ───────────────────── +Date: 2/8/2024, 2:19:54 PM (15 minutes ago) +Block: #364999 +Nonce: 50131 +``` + +### Parsing transaction data + +By default `zksync-cli` tries to fetch contract verification data from the server. +In case this is not possible it queries the +[open signature](https://www.4byte.directory/) database to get the signature of the transaction method. +If the method signature is not found, the transaction's data is displayed as a hex string. 
+ +Alternatively, you can provide the path to a local ABI file to decode the transaction's input data: + +```bash +zksync-cli transaction info \ + --abi "./Greeter.json" +``` + +### Viewing detailed information + +For an even more detailed overview you can use the `--full` option: + +```bash +zksync-cli transaction info --full +``` + +### Displaying raw JSON response + +To view the raw JSON response from the zkSync node, use the `--raw` option: + +```bash +zksync-cli transaction info --raw +``` diff --git a/content/00.build/40.tooling/10.zksync-cli/50.zksync-cli-wallet.md b/content/00.build/40.tooling/10.zksync-cli/50.zksync-cli-wallet.md new file mode 100644 index 00000000..6ff82343 --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/50.zksync-cli-wallet.md @@ -0,0 +1,67 @@ +--- +title: zksync-cli wallet +description: Manage your wallet on zkSync using zksync-cli. +--- + +Utilize the `zksync-cli wallet` command for an easy way to manage your assets on zkSync, like token transfers and balance check. + +## Commands + +- `transfer [options]` - Transfer tokens on L2 to another account. +- `balance [options]` - Get token balance of an L2 account. + +## Transfer + +To transfer ETH between accounts on zkSync, use the following command: + +```bash +zksync-cli wallet transfer +``` + +### Options + +If no options are provided directly, the CLI will prompt the user to enter the necessary information + +- `--amount <0.1>`: Specify the amount to transfer. +- `--chain <chain>`: Select the chain to use. +- `--rpc <URL>`: Override the default L2 RPC URL. +- `--pk`, `--private-key <wallet private key>`: Use the private key of the sender for the transaction. +- `--to`, `--recipient <0x address>`: Define the recipient address on L2. +- `--token <token address>`: Specify an ERC-20 token for the transfer instead of ETH. 
+
+### Examples
+
+#### Transfer ERC-20 Token
+
+For transferring ERC-20 tokens, include the `--token` option with the token's contract address:
+
+```bash
+zksync-cli wallet transfer --token 0x3e622317f8C93f7328350cF0B56d9eD4C620C5d6
+```
+
+## Balance
+
+View an L2 wallet's ETH balance using the following command:
+
+```bash
+zksync-cli wallet balance
+```
+
+### Options
+
+If no options are provided directly, the CLI will prompt the user to enter the necessary information.
+
+- `--address <0x address>`: Address of the wallet to check.
+- `--token <token address>`: Specify an ERC-20 token address to check its balance instead of ETH.
+- `--chain <chain>`: Chain to use.
+- `--rpc <URL>`: Override the default L2 RPC URL.
+
+### Examples
+
+#### Checking ERC-20 Token Balance
+
+To check the balance of a specific ERC-20 token, use the `--token` option, for example:
+
+```bash
+zksync-cli wallet balance --token 0x3e622317f8C93f7328350cF0B56d9eD4C620C5d6
+```
diff --git a/content/00.build/40.tooling/10.zksync-cli/60.zksync-cli-bridge.md b/content/00.build/40.tooling/10.zksync-cli/60.zksync-cli-bridge.md
new file mode 100644
index 00000000..24453199
--- /dev/null
+++ b/content/00.build/40.tooling/10.zksync-cli/60.zksync-cli-bridge.md
@@ -0,0 +1,91 @@
+---
+title: zksync-cli bridge
+description: Facilitate bridge operations between L1 and L2 using zksync-cli.
+---
+
+Facilitate bridge operations between Ethereum (L1) and zkSync (L2), including token deposits, withdrawals,
+and finalizing withdrawals with the `zksync-cli bridge` command.
+
+## Commands
+
+- `deposit [options]` - Transfer token from L1 to L2.
+- `withdraw [options]` - Transfer token from L2 to L1.
+- `withdraw-finalize [options]` - Finalize withdrawal of funds.
+
+## Deposit
+
+Transfer ETH from L1 to L2 using the deposit command.
+
+```bash
+zksync-cli bridge deposit
+```
+
+### Options
+
+If options are not specified, you will be prompted to enter them.
+
+- `--to, --recipient <0x address>`: Recipient address on L2.
+- `--amount <0.1>`: Amount to deposit. +- `--token <0x address>`: ERC-20 token address. +- `--pk, --private-key <wallet private key>`: Private key of the sender. +- `--chain <chain>`: Chain to use. +- `--l1-rpc <URL>`: Override L1 RPC URL. +- `--rpc <URL>`: Override L2 RPC URL. + +### Examples + +#### Depositing ERC-20 Tokens + +To deposit ERC-20 tokens, include the `--token` option with the token's contract address: + +```bash +zksync-cli bridge deposit --token 0x3e622317f8C93f7328350cF0B56d9eD4C620C5d6 +``` + +## Withdraw + +Transfer tokens from L2 back to L1. + +```bash +zksync-cli bridge withdraw +``` + +### Options + +You will be prompted to enter options if they are not specified. + +- `--to, --recipient <0x address>`: Recipient address on L1. +- `--amount <0.1>`: Amount to withdraw. +- `--token <0x address>`: ERC-20 token address (omit this option to withdraw ETH). +- `--pk, --private-key <wallet private key>`: Private key of the sender. +- `--chain <chain>`: Chain to use. +- `--l1-rpc <URL>`: Override L1 RPC URL. +- `--rpc <URL>`: Override L2 RPC URL. + +### Examples + +#### Withdrawing ERC-20 Tokens + +For withdrawing ERC-20 tokens, specify the token address using the `--token` option: + +```bash +zksync-cli bridge withdraw --token 0x3e622317f8C93f7328350cF0B56d9eD4C620C5d6 +``` + +## Withdraw Finalize + +Finalize the withdrawal of funds with the following command. **This step is necessary to complete the withdrawal process initiated on L2.** + +```bash +zksync-cli bridge withdraw-finalize +``` + +### Options + +Options will be prompted if not specified. + +- `--hash <transaction_hash>`: L2 withdrawal transaction hash to finalize. +- `--pk, --private-key <wallet private key>`: Private key of the sender. +- `--chain <chain>`: Chain to use. +- `--l1-rpc <URL>`: Override L1 RPC URL. +- `--rpc <URL>`: Override L2 RPC URL. 
diff --git a/content/00.build/40.tooling/10.zksync-cli/70.zksync-cli-config-chains.md b/content/00.build/40.tooling/10.zksync-cli/70.zksync-cli-config-chains.md new file mode 100644 index 00000000..6d50e58f --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/70.zksync-cli-config-chains.md @@ -0,0 +1,33 @@ +--- +title: zksync-cli config chains +description: Configure custom chains to use with zksync-cli. +--- + +Specify your own chain configuration by adding or editing custom chains to use on `zksync-cli`. +This feature is essential for developers looking to interact with ZK Stack Chains. + +## Configuring Custom Chains + +To add or edit a custom chain, use the following command: + +```bash +zksync-cli config chains +``` + +Upon execution, you will be guided through a series of prompts to enter specific details for the custom chain, including: + +- **Chain ID**: The id for the chain. +- **Chain Name**: A name for the chain. +- **Chain key**: A unique identifier for the chain. +- **RPC URL**: The RPC endpoint URL for interacting with the chain. +- **Other Information**: Depending on the chain's requirements, you may need to provide additional information such as block explorer URLs. + +## Using Custom Chains + +Once a custom chain is configured, you can use it across various `zksync-cli` commands. +All of your custom chains will be listed in the chain selection prompt, allowing you to select and use them as needed. 
+You can also specify a custom chain directly using the `--chain` option, for example: + +```bash +zksync-cli wallet balance --chain <custom-chain-key> +``` diff --git a/content/00.build/40.tooling/10.zksync-cli/_dir.yml b/content/00.build/40.tooling/10.zksync-cli/_dir.yml new file mode 100644 index 00000000..acdf43cf --- /dev/null +++ b/content/00.build/40.tooling/10.zksync-cli/_dir.yml @@ -0,0 +1 @@ +title: zkSync CLI diff --git a/content/00.build/40.tooling/20.hardhat/10.getting-started.md b/content/00.build/40.tooling/20.hardhat/10.getting-started.md new file mode 100644 index 00000000..a1570606 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/10.getting-started.md @@ -0,0 +1,359 @@ +--- +title: Getting started +description: Learn how to use Hardhat with zkSync. +--- + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +If you are using Windows, we strongly recommend you use Windows Subsystem for Linux (also known as WSL 2). +You can use `Hardhat` and `Hardhat zkSync plugins` without it, but it will work better if you use it. + +To install Node.js using WSL 2, please read this [guide](https://learn.microsoft.com/en-us/windows/dev-environment/javascript/nodejs-on-wsl). +:: + +[Hardhat](https://hardhat.org) is an Ethereum development environment, designed for easy smart contract development. +One of its most prominent features is extendability: you can easily add new plugins to your hardhat project. + +zkSync Era has the following official plugins for Hardhat: + +- [@matterlabs/hardhat-zksync](hardhat-zksync) - used to access to all of the supported plugins and to use them +as needed in your project. This should be the primary plugin most developers will need to use. +- [@matterlabs/hardhat-zksync-solc](hardhat-zksync-solc) - used to compile contracts written in Solidity. +- [@matterlabs/hardhat-zksync-vyper](hardhat-zksync-vyper) - used to compile contracts written in Vyper. 
+- [@matterlabs/hardhat-zksync-deploy](hardhat-zksync-deploy) - used to deploy smart contracts. +- [@matterlabs/hardhat-zksync-verify](hardhat-zksync-verify) - used to verify smart contracts. +- [@matterlabs/hardhat-zksync-verify-vyper](hardhat-zksync-verify-vyper) - used to verify vyper smart contracts. +- [@matterlabs/hardhat-zksync-upgradable](hardhat-zksync-upgradable) - used to deploy, update, and verify proxy smart contracts. +- [@matterlabs/hardhat-zksync-ethers](hardhat-zksync-ethers) - wrapper around zksync-ethers with some extra Hardhat-specific functionality. +- [@matterlabs/hardhat-zksync-node](hardhat-zksync-node) - used to run the zkSync era-test-node locally. + +Along with the official plugins, there are [other plugins from the community](/build/tooling/hardhat/other-plugins) that you can use with zkSync Era. + +To learn more about Hardhat itself, check out [the official documentation](https://hardhat.org/getting-started/). + +This tutorial shows you how to setup a zkSync Era Solidity project with Hardhat using the [zkSync CLI](/build/tooling/zksync-cli). + +If you are using Vyper, check out the [Vyper plugin documentation](/build/tooling/hardhat/hardhat-zksync-vyper) +or the [vyper-example](%%zk_git_repo_hardhat-zksync%%/tree/main/examples/vyper-example) in GitHub! + +## Prerequisites + +- Make sure your machine satisfies the [system requirements](%%zk_git_repo_era-compiler-solidity%%/tree/main#system-requirements). +- You have Node installed and have `yarn` or `npm` package manager. +- You are already familiar with deploying smart contracts on zkSync. If not, please refer to the first section of the [Contract Deployment](/build/developer-reference/ethereum-differences/contract-deployment) +- A wallet with sufficient Sepolia `ETH` on Ethereum and %%zk_testnet_name%% to pay for deploying smart contracts. +- You can get Sepolia ETH from the [network faucets](/ecosystem/network-faucets). 
+ - Get testnet `ETH` for zkSync Era using [bridges](https://zksync.io/explore#bridges) to bridge funds to zkSync. + +- You know how to get your [private key from your MetaMask wallet](https://support.metamask.io/hc/en-us/articles/360015289632-How-to-export-an-account-s-private-key). + +::callout +Skip the hassle for test ETH by using `zksync-cli` for local testing. +Use the `npx zksync-cli dev start` command to initialize a local zkSync development environment, which includes local Ethereum and zkSync nodes. +This method allows you to test contracts without requesting external testnet funds. Explore more in the [zksync-cli documentation](/build/tooling/zksync-cli). +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +- Contracts must be compiled using the +[official zkSync Era compilers](/zk-stack/components/compiler/toolchain), with their respective Hardhat plugins. + +- Contracts compiled with other compilers will fail to deploy to zkSync Era. +:: + +## Project setup + +To create a new project run the `zksync-cli create` command, passing a project name: + +#### Solidity project + +```sh +npx zksync-cli create demo --template hardhat_solidity +``` + +#### Vyper project + +```sh +npx zksync-cli create demo --template hardhat_vyper +``` + +This command creates a `demo` folder and clones a Hardhat template project inside it. +The downloaded project is already configured and contains all the required plugins. + +::callout +If you want to migrate an existing project, please check the [project migration guide](migrating-to-zksync). +:: + +## Hardhat configuration + +The `hardhat.config.ts` file contains some zkSync Era specific configurations: + +The zkSync Era deployment and compiler plugin imports: + +#### Solidity project + +```ts +import "@matterlabs/hardhat-zksync"; +``` + +The `zksolc` block contains the minimal configuration for the compiler. 
+ +```ts +zksolc: { + version: "latest", // Uses latest available in %%zk_git_repo_zksolc-bin%% + settings: {}, +}, +``` + +#### Vyper project + +```ts +import "@nomiclabs/hardhat-vyper"; +import "@matterlabs/hardhat-zksync-deploy"; +import "@matterlabs/hardhat-zksync-vyper"; +``` + +The `zkvyper` block contains the minimal configuration for the compiler. + +```ts +zkvyper: { + version: "latest", // Uses latest available in %%zk_git_repo_zkvyper-bin%% + settings: {}, +}, +``` + +#### Network + +The network endpoints of the `zkSyncTestnet` network change dynamically for local tests. + +```ts +// dynamically changes endpoints for local tests +const zkSyncTestnet = + process.env.NODE_ENV == "test" + ? { + url: "http://localhost:3050", + ethNetwork: "http://localhost:8545", + zksync: true, + } + : { + url: "%%zk_testnet_rpc_url%%", + ethNetwork: "%%zk_testnet_identifier%%", + zksync: true, + }; +``` + +::callout{icon="i-heroicons-information-circle" color="blue"} +For local zkSync testing, modify `url` and `ethNetwork` in `hardhat.config.ts` +to align with your local zkSync and Ethereum node's L2 and L1 RPC URLs, respectively. +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +This template project includes a basic unit test in the `/test` folder that runs with the local-setup and can be executed with `yarn test`. +:: + +## Set your Private Key + +Rename `.env.example` to `.env` and set your private key: + +```text +WALLET_PRIVATE_KEY=YourPrivateKeyHere +``` + +Your private key will be used for paying the costs of deploying the smart contract. + +## Compile and deploy a contract + +Smart contracts belong in the `contracts` folder. + +#### 1. To compile the contract, run + +```sh +yarn hardhat compile +``` + +You'll see the following output: + +```text +Compiling 1 Solidity file +Successfully compiled 1 Solidity file +// Successfully compiled 1 Vyper file - Vyper project +✨ Done in 1.09s. 
+``` + +The `artifacts-zk` and `cache-zk` folders appear in the root directory (instead of the regular Hardhat's `artifacts` and `cache`). +These folders contain the compilation artifacts (including contract's ABIs) and compiler cache files. + +::callout +The `artifacts-zk` and `cache-zk` folders are included in the `.gitignore` file. +:: + +The `deploy-greeter.ts` script is in the `deploy` folder. +This script uses the `Deployer` class from the `hardhat-zksync-deploy` package to deploy the `Greeter.sol`/`Greeter.vy` contract. + +```ts +import { Wallet, utils } from "zksync-ethers"; +import * as ethers from "ethers"; +import { HardhatRuntimeEnvironment } from "hardhat/types"; +import { Deployer } from "@matterlabs/hardhat-zksync-deploy"; + +// load env file +import dotenv from "dotenv"; +dotenv.config(); + +// load wallet private key from env file +const PRIVATE_KEY = process.env.WALLET_PRIVATE_KEY || ""; + +if (!PRIVATE_KEY) throw "⛔️ Private key not detected! Add it to the .env file!"; + +// An example of a deploy script that will deploy and call a simple contract. +export default async function (hre: HardhatRuntimeEnvironment) { + console.log(`Running deploy script for the Greeter contract`); + + // Initialize the wallet. + const wallet = new Wallet(PRIVATE_KEY); + + // Create deployer object and load the artifact of the contract you want to deploy. 
+ const deployer = new Deployer(hre, wallet); + const artifact = await deployer.loadArtifact("Greeter"); + + // Estimate contract deployment fee + const greeting = "Hi there!"; + const deploymentFee = await deployer.estimateDeployFee(artifact, [greeting]); + + // ⚠️ OPTIONAL: You can skip this block if your account already has funds in L2 + // const depositHandle = await deployer.zkWallet.deposit({ + // to: deployer.zkWallet.address, + // token: utils.ETH_ADDRESS, + // amount: deploymentFee.mul(2), + // }); + // // Wait until the deposit is processed on zkSync + // await depositHandle.wait(); + + // Deploy this contract. The returned object will be of a `Contract` type, similar to ones in `ethers`. + // `greeting` is an argument for contract constructor. + const parsedFee = ethers.formatEther(deploymentFee); + console.log(`The deployment is estimated to cost ${parsedFee} ETH`); + + const greeterContract = await deployer.deploy(artifact, [greeting]); + + // obtain the Constructor Arguments + console.log("constructor args:" + greeterContract.interface.encodeDeploy([greeting])); + + // Show the contract info. + const contractAddress = await greeterContract.getAddress(); + console.log(`${artifact.contractName} was deployed to ${contractAddress}`); +} +``` + +#### 2. To execute the deployment script run + +```sh +yarn hardhat deploy-zksync --script deploy-greeter.ts +``` + +This script deploys the `Greeting` contract with the message "Hi there!" to %%zk_testnet_name%%. + +You should see something like this: + +```bash +Running deploy script for the Greeter contract +The deployment is estimated to cost 0.00579276320831943 ETH +constructor args:0x000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000094869207468657265210000000000000000000000000000000000000000000000 +Greeter was deployed to 0x46f1d2d8A16DBD8b47e9D61175a826ac667288Be4D1293a22E8 + +✨ Done in 12.69s. +``` + +Congratulations! 
You have deployed a smart contract project to %%zk_testnet_name%% with Hardhat 🎉 + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Request-Rate Exceeded message**: + +- This message is caused by using the default RPC endpoints provided by ethers. +- To avoid this, use your own Sepolia RPC endpoint in the `hardhat.config.ts` file. +- Find multiple [node providers here](https://github.com/arddluma/awesome-list-rpc-nodes-providers). +:: + +## Interact with the contract + +The template project contains another script to interact with the contract. + +1. Enter the address of the deployed Greeter contract in the `CONTRACT_ADDRESS` variable of the `use-greeter.ts` script: + + ```ts [use-greeter.ts] + import { Provider } from "zksync-ethers"; + import * as ethers from "ethers"; + import { HardhatRuntimeEnvironment } from "hardhat/types"; + + // load env file + import dotenv from "dotenv"; + dotenv.config(); + + // load contract artifact. Make sure to compile first! - Solidity Project + import * as ContractArtifact from "../artifacts-zk/contracts/Greeter.sol/Greeter.json"; + // load contract artifact. Make sure to compile first! - Vyper Project + //import * as ContractArtifact from "../artifacts-zk/contracts/Greeter.vy/Greeter.json"; + + const PRIVATE_KEY = process.env.WALLET_PRIVATE_KEY || ""; + + if (!PRIVATE_KEY) throw "⛔️ Private key not detected! Add it to the .env file!"; + + // Address of the contract on zkSync testnet + const CONTRACT_ADDRESS = ""; + + if (!CONTRACT_ADDRESS) throw "⛔️ Contract address not provided"; + + // An example of a deploy script that will deploy and call a simple contract. + export default async function (hre: HardhatRuntimeEnvironment) { + console.log(`Running script to interact with contract ${CONTRACT_ADDRESS}`); + + // Initialize the provider. 
+ // @ts-ignore + const provider = new Provider(hre.userConfig.networks?.zkSyncTestnet?.url); + const signer = new ethers.Wallet(PRIVATE_KEY, provider); + + // Initialise contract instance + const contract = new ethers.Contract(CONTRACT_ADDRESS, ContractArtifact.abi, signer); + + // Read message from contract + console.log(`The message is ${await contract.greet()}`); + + // send transaction to update the message + const newMessage = "Hello people!"; + const tx = await contract.setGreeting(newMessage); + + console.log(`Transaction to change the message is ${tx.hash}`); + await tx.wait(); + + // Read message after transaction + console.log(`The message now is ${await contract.greet()}`); + } + ``` + +1. To execute the script, run: + + ```sh + yarn hardhat deploy-zksync --script use-greeter.ts + ``` + + The script will: + + - Retrieve the message from the contract by calling the `greet()` method. + - Update the greeting message in the contract with the `setGreeting()` method. + - Retrieve the message from the contract again. + + You should see something like this: + + ```bash + Running script to interact with contract Greeter + The message is Hello there! + Transaction to change the message is 0x12f16578A16DB0f47e9D61175a823ac214288Af + The message now is Hello people! + + ✨ Done in 14.32s. + ``` + +## Learn more + +- To learn more about the zkSync Hardhat plugins check out the [plugins documentation](/build/tooling/hardhat/getting-started). +- If you want to know more about how to interact with zkSync using Javascript, +check out the [zksync-ethers Javascript SDK documentation](https://docs.zksync.io/sdk/js/ethers/v6/getting-started). 
diff --git a/content/00.build/40.tooling/20.hardhat/100.hardhat-zksync-ethers.md b/content/00.build/40.tooling/20.hardhat/100.hardhat-zksync-ethers.md
new file mode 100644
index 00000000..afd24f1a
--- /dev/null
+++ b/content/00.build/40.tooling/20.hardhat/100.hardhat-zksync-ethers.md
@@ -0,0 +1,181 @@
+---
+title: hardhat-zksync-ethers
+description:
+---
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+Ensure you are using the correct version of the plugin with ethers:
+
+- For plugin version **<1.0.0**:
+
+  - Compatible with ethers **v5**.
+
+- For plugin version **≥1.0.0**:
+  - Compatible with ethers **v6** (⭐ Recommended)
+
+Examples are adapted for plugin version **>=1.0.0**
+::
+
+## Installation
+
+[@matterlabs/hardhat-zksync-ethers](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-ethers)
+
+Add the latest version of this plugin to your project with the following command:
+
+::code-group
+
+```bash [yarn]
+yarn add -D @matterlabs/hardhat-zksync-ethers zksync-ethers ethers
+```
+
+```bash [npm]
+npm i -D @matterlabs/hardhat-zksync-ethers
+```
+
+```bash [bun]
+bun add @matterlabs/hardhat-zksync-ethers --dev
+```
+
+::
+
+## Configuration
+
+Import the package in the `hardhat.config.ts` file:
+
+```ts
+import "@matterlabs/hardhat-zksync-ethers";
+```
+
+## Tasks
+
+This plugin creates no additional tasks.
+
+## Environment extensions
+
+This plugin adds a zksync-ethers object to the Hardhat Runtime Environment.
+
+This object has the same API as [zksync-ethers](https://docs.zksync.io/sdk/js/ethers/v6/getting-started), with some extra Hardhat-specific functionality.
+
+## Helpers
+
+Helpers added to the zksync-ethers object:
+
+```ts
+interface FactoryDeps {
+  // A mapping from the contract hash to the contract bytecode.
+  [contractHash: string]: string;
+}
+
+interface ZkSyncArtifact extends Artifact {
+  // List of factory dependencies of a contract.
+ factoryDeps: FactoryDeps; + // Mapping from the bytecode to the %%zk_zkevm_label%% assembly (used for tracing). + sourceMapping: string; +} + +interface FactoryOptions { + wallet?: zk.Wallet; +} + +function providerL2: () => zk.Provider; +function providerL1: () => ethers.Provider; +function getWallet: (privateKeyOrIndex?: string | number) => zk.Wallet; +function getContractFactory: (name: string, wallet?: zk.Wallet, deploymentType?: DeploymentType) => Promise<zk.ContractFactory>; +function getContractFactory: (abi: any[], bytecode: ethers.BytesLike,wallet?: Wallet,deploymentType?: DeploymentType) => Promise<zk.ContractFactory>; +function getContractFactoryFromArtifact: (artifact: ZkSyncArtifact, wallet?: zk.Wallet, deploymentType?: DeploymentType) => Promise<zk.ContractFactory>; +function getContractAt: (nameOrAbi: string | any[], address: string | Address, wallet?: zk.Wallet) => Promise<zk.Contract>; +function getContractAtFromArtifact: (artifact: ZkSyncArtifact, address: string, wallet?: zk.Wallet) => Promise<zk.Contract>; +function getSigner: (address: string) => zk.Signer; +function getSigners: () => zk.Signer[]; +function getImpersonatedSigner: (address: string) => Promise<zk.Signer>; +function extractFactoryDeps: (artifact: ZkSyncArtifact) => Promise<string[]>; +function loadArtifact: (name: string) => Promise<ZkSyncArtifact>; +function deployContract: (artifact: ZkSyncArtifact, constructorArguments: any[], wallet?: zk.Wallet, overrides?: ethers.Overrides, additionalFactoryDeps?: ethers.BytesLike[]) => Promise<zk.Contract>; +``` + +- `providerL2()` - returns a `zk.Provider` for L2, automatically connected to the selected network. +- `providerL1()` - returns a `ethers.Provider` for L1, automatically connected to the selected network. +- `getWallet(privateKeyOrIndex?: string | number)` - returns `zk.Wallet` for the given private key or index. 
+If the network is set to local and the private key is not provided, the method will return a wallet for rich accounts +with the default index of `0` or the specified index. +If the `accounts` object is set in the hardhat config and the private key is not specified, +the method will return the wallet for the given account with the default index `0` or for the specified index. +- `getWallets()` - returns all wallets of type `zk.Wallet`. If the network is set to local, the method will return wallets for rich accounts. +If the `accounts` object is set in the hardhat config for the used network, the method will return the wallets for the provided accounts. +- `getContractFactory(name: string, wallet?: zk.Wallet, deploymentType?: DeploymentType)` - returns a `zk.ContractFactory` for provided artifact name. +- `getContractFactory: (abi: any[], bytecode: ethers.BytesLike,wallet?: Wallet,deploymentType?: DeploymentType)` - returns a zk.ContractFactory +for provided artifact abi and bytecode. +- `getContractFactoryFromArtifact(artifact: ZkSyncArtifact, wallet?: zk.Wallet, deploymentType?: DeploymentType)` - returns a `zk.ContractFactory` +for provided artifact. 
+- `getContractAt(nameOrAbi: string | any[], address: string | Address, wallet?: zk.Wallet)` - returns `zk.Contract` for provided artifact name +or abi and address of deployed contract +- `getContractAtFromArtifact: (artifact: ZkSyncArtifact, address: string, wallet?: zk.Wallet)` - returns `zk.ContractFactory` for provided artifact +and address of deployed contract +- `getImpersonatedSigner(address: string)` - impersonates `zk.Signer` from address +- `loadArtifact(name: string)` - load `ZkSyncArtifact` from contract name +- `extractFactoryDeps(artifact: ZkSyncArtifact)` - extracts factory deps from artifact +<!-- markdownlint-disable-next-line MD013 --> +- `deployContract(artifact: ZkSyncArtifact, constructorArguments: any[], wallet?: zk.Wallet, overrides?: ethers.Overrides, additionalFactoryDeps?: ethers.BytesLike[])` - deploys contract, for more details check out the [deployment section of the quickstart](getting-started#compile-and-deploy-a-contract). + +::callout{icon="i-heroicons-information-circle" color="blue"} + +- If `wallet?: zk.Wallet` is not provided and if the network is set to local, +the default wallet will be the first account in the list of rich accounts. +If an `accounts` object is set in the hardhat config for the used network, the default wallet will be taken from that object. +- If `deploymentType?: DeploymentType` is not provided default value will be `create`. +:: + +## Usage + +Install it and access zksync-ethers through the Hardhat Runtime Environment anywhere you need it (tasks, scripts, tests, etc). 
For example:
+
+Task usage:
+
+```ts
+task("getFeeData", "Returns the fee data.").setAction(async (hre) => {
+  const feeDataL2 = await hre.zksyncEthers.providerL2.getFeeData();
+  const feeDataL1 = await hre.zksyncEthers.providerL1.getFeeData();
+
+  return { feeDataL2, feeDataL1 };
+});
+```
+
+Script usage:
+
+```ts
+export default async function (hre: HardhatRuntimeEnvironment) {
+  console.info(chalk.yellow(`Running deploy`));
+
+  // automatically connected to the selected network
+  const gasPrice = await hre.zksyncEthers.providerL2.send("eth_gasPrice", []);
+
+  // getContractFactory with default wallet, deploy contract and set new greeting message
+  const greeterFactory = await hre.zksyncEthers.getContractFactory("Greeter");
+  const greeter = await greeterFactory.deploy("Hello, world!");
+
+  console.info(chalk.green(`Greeter deployed to: ${await greeter.getAddress()}`));
+
+  console.info(chalk.green(`Greeter greeting set to: ${await greeter.greet()}`));
+
+  const tx = await greeter.setGreeting("Hello, world again!");
+  await tx.wait();
+  console.info(chalk.green(`Greeter greeting set to: ${await greeter.greet()}`));
+
+  // deploy with provided wallet using loadArtifact and deployContract
+  const wallet = await hre.zksyncEthers.getWallet("0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110");
+  console.info(chalk.green(`Wallet address: ${await wallet.getAddress()}`));
+
+  // deposit ETH from L1 to L2 to cover costs of deployment
+  const depositHandle = await wallet.deposit({
+    to: wallet.address,
+    token: utils.ETH_ADDRESS,
+    amount: ethers.parseEther("0.001"),
+  });
+  await depositHandle.wait();
+
+  const artifact = await hre.zksyncEthers.loadArtifact("Greeter");
+  const greets = await hre.zksyncEthers.deployContract(artifact, ["Hello, world!"], wallet);
+  console.info(chalk.green(`Greeter deployed to: ${await greets.getAddress()}`));
+  console.info(chalk.green(`Greeter greeting set to: ${await greets.greet()}`));
+}
+```
diff --git
a/content/00.build/40.tooling/20.hardhat/120.hardhat-zksync-node.md b/content/00.build/40.tooling/20.hardhat/120.hardhat-zksync-node.md new file mode 100644 index 00000000..da625d2f --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/120.hardhat-zksync-node.md @@ -0,0 +1,169 @@ +--- +title: hardhat-zksync-node +description: +--- + +This plugin is used to provide a convenient way to run ZKsync Era [In-memory node](/build/test-and-debug/in-memory-node) locally using hardhat. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The zkSync Era In-memory node binaries are not supported on Windows at the moment. +As an alternative, users can utilize the Windows Subsystem for Linux (WSL). +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Ensure you are using the correct version of the plugin with ethers: + +- For plugin version **<1.0.0**: + + - Compatible with ethers **v5**. + +- For plugin version **≥1.0.0**: + - Compatible with ethers **v6** (⭐ Recommended) + +:: + +## Installation + +[@matterlabs/hardhat-zksync-node](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-node) + +Add the latest version of this plugin to your project with the following command: + +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-node zksync-ethers ethers +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-node +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-node zksync-ethers ethers --dev +``` + +:: + +### Configuration + +Import the plugin in the `hardhat.config.ts` file: + +```javascript +import "@matterlabs/hardhat-zksync-node"; +``` + +### Commands + +::code-group + +```sh [yarn] +yarn hardhat node-zksync +``` + +```sh [npm] +npm run hardhat node-zksync +``` + +:: + +This command runs a local zkSync In-memory node by initiating a JSON-RPC server. +It uses the provided or default configurations to set up and run the zkSync node, allowing for blockchain operations in a local environment. 
+The command also handles tasks such as downloading the necessary JSON-RPC server binary if it's not already present. + +- `--port` - Port on which the server should listen. Defaults to 8011. +- `--log` - Log filter level. Accepted values are: error, warn, info, and debug. Defaults to info. +- `--log-file-path` - Path to the file where logs should be written. Defaults to `era_test_node.log`. +- `--cache` - Type of cache to use. Accepted values are: none, disk, and memory. Defaults to disk. +- `--cache-dir` - Directory location for the `disk` cache. Defaults to `.cache`. +- `--reset-cache` - Flag to reset the local `disk` cache. +- `--show-calls` - Determines which call debug information to show. Accepted values are: none, user, system, and all. Defaults to none. +- `--show-storage-logs` - Determines which storage logs to show. Accepted values are: none, read, write, and all. Defaults to none. +- `--show-vm-details` - Specifies the level of Virtual Machine (VM) details to show. Accepted values are: none and all. Defaults to none. +- `--show-gas-details` - Specifies the level of gas details to show. Accepted values are: none and all. Defaults to none. +- `--resolve-hashes` - Flag to try contacting openchain to resolve the ABI & topic names. + When enabled, it makes the debug log more readable but might decrease performance. +- `--dev-use-local-contracts` - Flag to load locally compiled system contracts. Useful when making changes to system contracts or bootloader. +- `--fork` - Starts a local network that is a fork of another network. Accepted values are: testnet, mainnet, or a specific URL. +- `--fork-block-number` - Specifies the block height at which to fork. +- `--replay-tx` - Transaction hash to replay. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Parameter Restrictions**: +The `--replay-tx` and `--fork-block-number` parameters cannot be specified simultaneously. 
+The `--replay-tx` is used for replaying a remote transaction locally for deep debugging, +while `--fork-block-number` is used for forking the blockchain at a specified block number. +Combining these actions is not supported. + +Additionally, if either `--replay-tx` or `--fork-block-number` is specified, the `--fork` parameter must also be provided. +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Learn More**: +If you wish to learn more about replaying transactions or forking, +check out the [In-memory node documentation](/build/test-and-debug/in-memory-node). +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Supported APIs**: + +To see a list of all supported APIs, visit [this link](%%zk_git_repo_era-test-node%%/blob/main/SUPPORTED_APIS.md). +:: + +## Running Hardhat's test Task with hardhat-zksync-node + +The `hardhat-zksync-node` plugin enhances Hardhat's test task, allowing all tests to run against an In-memory node operated in a separate process. +By invoking the test task, ensure you are using the `hardhat` network and have set its `zksync` flag to `true`. +Doing so will initiate the plugin's In-memory node alongside the tests. After the tests conclude, the node shuts down gracefully. +The plugin begins port allocation from the default 8011. + +```ts +networks: { + hardhat: { + zksync: true, + } +}, +``` + +The network object in the Hardhat runtime environment is also updated to match the running node as follows: + +- The network name is set to `zkSyncEraTestNode`. +- The network config is set as an HTTP network config, adopting default values. +- The network provider uses a provider adapter that implements `EthereumProvider` and wraps the zksync's JS SDK Provider implementation. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Provider URL Mismatch**: + +When running tests, be aware that the In-memory node attempts to allocate free ports (starting from the default 8011). 
+This can lead to situations where the provider's URL does not match your expectations. +It's strongly recommended to use the network config URL from the hardhat runtime environment +to instantiate the Provider instance from the JS SDK, like this: + +```typescript +const provider = new Provider(hre.network.config.url); +``` + +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +If TypeScript marks the 'url' property indicating a potential issue (even though it works), simply add the following import to your project: + +```typescript +import "@matterlabs/hardhat-zksync-node/dist/type-extensions"; +``` + +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Accessing the Network Provider in Hardhat** + +Apart from the previously described method of instantiating the Provider, you can also directly access it from the Hardhat runtime environment. +Due to incompatibilities between Hardhat's `EthereumProvider` and the JS SDK Provider, we've introduced a new adapter (`ZkSyncProviderAdapter`). +This adapter bridges the gap and ensures that all the necessary functionalities are seamlessly integrated. +If you wish to access the JS SDK Provider directly, you can do so in TypeScript with: + +```typescript +// hre stands for hardhat runtime environment +(hre.network.provider as ZkSyncProviderAdapter)._zkSyncProvider; +``` + +:: diff --git a/content/00.build/40.tooling/20.hardhat/130.other-plugins.md b/content/00.build/40.tooling/20.hardhat/130.other-plugins.md new file mode 100644 index 00000000..e5fe0d08 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/130.other-plugins.md @@ -0,0 +1,92 @@ +--- +title: Hardhat Community Plugins +description: Discover community plugins for Hardhat that work on zkSync Era. +--- + +The following plugins were created by the community and tested on zkSync Era. 
+Feel free to suggest new plugins by [creating an issue (feature request) on this page](%%zk_git_repo_hardhat-zksync%%/issues/new?assignees=&labels=feat&projects=&template=feature_report.md&title=).
+
+## Supported plugins
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+Here is [a template project configured with all plugins mentioned below](%%zk_git_repo_era-hardhat-with-plugins%%).
+You can use it as a starting template for your projects.
+::
+
+### hardhat-deploy
+
+Multiple tasks for advanced deployments.
+
+This plugin was [updated to support zkSync Era](https://github.com/wighawag/hardhat-deploy/pull/437) on version `0.11.26`.
+
+[More information](https://www.npmjs.com/package/hardhat-deploy)
+
+### typechain/hardhat
+
+Automatically generate TypeScript bindings for smart contracts.
+
+[More Information](https://www.npmjs.com/package/@typechain/hardhat)
+
+### openzeppelin/hardhat-upgrades
+
+Plugin used to deploy and update upgradable smart contracts (proxies).
+Use the [hardhat-zksync-upgradable plugin](hardhat-zksync-upgradable) which provides an easy-to-use interface
+for interacting with the OpenZeppelin Upgrades Plugins within a Hardhat environment on zkSync.
+
+### hardhat-chai-matchers
+
+Adds capabilities to make your smart contract tests easy to write and read.
+
+[More Information](https://www.npmjs.com/package/@nomicfoundation/hardhat-chai-matchers)
+
+### hardhat-contract-sizer
+
+[More Information](https://www.npmjs.com/package/hardhat-contract-sizer)
+
+### hardhat-abi-exporter
+
+Different options to export smart contract ABIs.
+
+[More Information](https://www.npmjs.com/package/hardhat-abi-exporter)
+
+### hardhat-gas-reporter
+
+Although this plugin works out of the box, zkSync Era has a [different fee model](/build/developer-reference/fee-model) than Ethereum.
+Users should consider this when analysing the report generated by this plugin.
+
+In addition, make sure to read about [local testing](/build/test-and-debug/).
+ +[More Information](https://www.npmjs.com/package/hardhat-gas-reporter) + +### hardhat-preprocessor + +This plugin enables the pre-processing of contract source code prior to compilation. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**CACHE_BREAKER Field Issue**: + +The hardhat-preprocessor plugin adds the CACHE_BREAKER field to the list of libraries, +which can lead to failed verification when using [hardhat-zksync-verify](hardhat-zksync-verify) plugin. +To prevent this, please include the `--no-compile` flag: `yarn hardhat verify --no-compile`. + +:: + +[More Information](https://www.npmjs.com/package/hardhat-preprocessor) + +## Unsupported plugins + +### nomicfoundation/hardhat-network-helpers + +This plugin adds new methods that interact with the Hardhat network used for testing. + +However, we do not recommend using the Hardhat network for testing contracts that will be deployed on zkSync Era. + +We recommend instead using the [in-memory node](/build/test-and-debug/in-memory-node) +or the [docker setup to test your contracts](/build/test-and-debug/dockerized-l1-l2-nodes) +as they will give you the same results as our testnet/mainnet. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The additional methods provided by this plugin are not compatible with the zkSync Era in-memory node or docker setup yet. +Currently, we are working on adapting our in-memory node to ensure compatibility with hardhat-network-helpers. + +:: diff --git a/content/00.build/40.tooling/20.hardhat/20.migrating-to-zksync.md b/content/00.build/40.tooling/20.hardhat/20.migrating-to-zksync.md new file mode 100644 index 00000000..14418be3 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/20.migrating-to-zksync.md @@ -0,0 +1,275 @@ +--- +title: Migrating to zkSync +description: Learn how to migrate an existing Hardhat project to zkSync Era. 
+--- + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +If you are using Windows, we strongly recommend you use Windows Subsystem for Linux (also known as WSL 2). +You can use `Hardhat` and `Hardhat zkSync plugins` without it, but it will work better if you use it. + +To install Node.js using WSL 2, please read this [guide](https://learn.microsoft.com/en-us/windows/dev-environment/javascript/nodejs-on-wsl). +:: + +This guide shows you how to migrate an existing Hardhat project to zkSync Era. + +## Overview + +zkSync Era offers [multiple Hardhat plugins](/build/tooling/hardhat/getting-started) with different features. +This guide details the one you need to migrate your project to zkSync Era. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +#### Non-default paths are not supported yet + +- Contract files must be included in the `contracts` folder and deployment scripts must be included in the `deploy` folder. +- Support for custom paths will be included in the future. +:: + +## Install dependencies + +Although zkSync Era is compatible with Solidity and Vyper, +the deployed bytecode and the deployment process is different from Ethereum or other EVM blockchains. +So the first step is to install the compiler and deployer Hardhat plugins: + +If you're using Vyper, replace `@matterlabs/hardhat-zksync-solc` with `@matterlabs/hardhat-zksync-vyper` + +::code-group + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync +``` + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync +``` + +```bash [pnpm] +pnpm i -D @matterlabs/hardhat-zksync +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync --dev +``` + +:: + +## Configuration changes + +In your `hardhat.config.ts` file import the installed dependencies: + +```ts +import "@matterlabs/hardhat-zksync"; +``` + +Networks on zkSync Era require two different URL endpoints: one for layer 1 (Ethereum or Sepolia), and one for layer 2 (zkSync). 
+This is how you add the %%zk_testnet_name%% to your list of networks in the `hardhat.config.ts`: + +```typescript +const config: HardhatUserConfig = { + networks: { + hardhat: { + zksync: false, + }, + zkSyncTestnet: { + url: "%%zk_testnet_rpc_url%%", + ethNetwork: "%%zk_testnet_identifier%%", // or a Sepolia RPC endpoint from Infura/Alchemy/Chainstack etc. + zksync: true, + }, + }, + defaultNetwork: "zkSyncTestnet", + // configuration continues .... +}; +``` + +::callout{icon="i-heroicons-information-circle" color="blue"} +Remember to add `zksync: false` to any other networks. +:: + +Finally, add the compiler options inside a `zksolc` or `zkvyper` property. Here is the minimal configuration for a Solidity project: + +```ts +zksolc: { + version: "latest", + settings: {}, +}, +``` + +For more advanced settings, check out the [Solidity](/build/tooling/hardhat/hardhat-zksync-solc) +or [Vyper](/build/tooling/hardhat/hardhat-zksync-vyper) plugins. + +### How to configure multiple compilation targets + +To configure the `hardhat.config.ts` file to target both zkSync Era and other networks, do the following: + +1. In your `hardhat.config.ts`, configure the zkSync Era network with `zksync: true`. +2. Configure all other networks with `zksync: false`. +3. Run the compilation or deployment scripts with the network flag: `yarn hardhat compile --network zkSyncTestnet` for zkSync Era network +or `yarn hardhat compile --network sepolia` for other networks, e.g sepolia. + +```typescript +networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>", // The Ethereum Web3 RPC URL. + zksync: false, // Set to false to target other networks. + }, + zkSyncTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, // Set to true to target zkSync Era. 
+ }
+},
+
+```
+
+### Full configuration
+
+Here is an example config file:
+
+```ts
+import { HardhatUserConfig } from "hardhat/config";
+
+import "@matterlabs/hardhat-zksync";
+
+const config: HardhatUserConfig = {
+  zksolc: {
+    version: "latest", // Uses latest available in %%zk_git_repo_zksolc-bin%%
+    settings: {},
+  },
+  defaultNetwork: "zkSyncTestnet",
+  networks: {
+    hardhat: {
+      zksync: false,
+    },
+    sepolia: {
+      url: "https://sepolia.com/api/abcdef12345",
+      zksync: false,
+    },
+    mainnet: {
+      url: "https://ethereum.mainnet.com/api/abcdef12345",
+      zksync: false,
+    },
+    zkSyncTestnet: {
+      url: "%%zk_testnet_rpc_url%%",
+      ethNetwork: "%%zk_testnet_identifier%%", // or a Sepolia RPC endpoint from Infura/Alchemy/Chainstack etc.
+      zksync: true,
+    },
+  },
+  solidity: {
+    version: "0.8.13",
+  },
+  // OTHER SETTINGS...
+};
+
+export default config;
+```
+
+## Compile contracts
+
+To compile your contracts for zkSync Era, run:
+
+::code-group
+
+```bash [yarn]
+yarn hardhat compile --network zkSyncTestnet
+```
+
+```bash [npm]
+npx hardhat compile --network zkSyncTestnet
+```
+
+::
+
+By passing the `--network` flag, we make sure Hardhat will use the zksolc compiler (or zkvyper).
+This command will compile all contracts in the `/contracts` folder and create the folders `artifacts-zk` and `cache-zk`.
+
+If your contracts import any non-inlineable libraries, you need to configure them in the `hardhat.config.ts` file.
+Find more info and examples about [compiling libraries here](/build/tooling/hardhat/compiling-libraries).
+
+## Deploy contracts
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+`hardhat-deploy` version `^0.11.26` supports deployments on zkSync Era.
+::
+
+To deploy your contracts you need to use the `Deployer` class from the `hardhat-zksync` plugin.
+
+Here is a basic deployment script for a `Greeter` contract:
+
+```ts
+import { utils, Wallet } from "zksync-ethers";
+import * as ethers from "ethers";
+import { HardhatRuntimeEnvironment } from "hardhat/types";
+import { Deployer } from "@matterlabs/hardhat-zksync";
+
+// An example of a deploy script that will deploy and call a simple contract.
+export default async function (hre: HardhatRuntimeEnvironment) {
+  console.log(`Running deploy script`);
+
+  // Initialize the wallet.
+  const wallet = new Wallet("<WALLET-PRIVATE-KEY>");
+
+  // Create deployer object and load the artifact of the contract we want to deploy.
+  const deployer = new Deployer(hre, wallet);
+  // Load contract
+  const artifact = await deployer.loadArtifact("Greeter");
+
+  // Deploy this contract. The returned object will be of a `Contract` type,
+  // similar to the ones in `ethers`.
+  const greeting = "Hi there!";
+  // `greeting` is an argument for contract constructor.
+  const greeterContract = await deployer.deploy(artifact, [greeting]);
+
+  // Show the contract info.
+  console.log(`${artifact.contractName} was deployed to ${await greeterContract.getAddress()}`);
+}
+```
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+To obtain Sepolia ETH please refer to the [network faucets page](/ecosystem/network-faucets) for more info.
+<br>
+To transfer Sepolia ETH to %%zk_testnet_name%% use [bridges](https://zksync.io/explore#bridges).
+::
+
+Include your deployment script in the `deploy` folder and execute it by running:
+
+::code-group
+
+```bash [yarn]
+yarn hardhat deploy-zksync --script SCRIPT_FILENAME.ts --network zkSyncTestnet
+```
+
+```bash [npm]
+npx hardhat deploy-zksync --script SCRIPT_FILENAME.ts --network zkSyncTestnet
+```
+
+::
+
+If you don't include the `--script` option, all script files inside the `deploy` folder will be executed in alphabetical order.
+ +Check out a detailed [approach](/build/tooling/hardhat/hardhat-zksync-deploy) on how to use `hardhat-zksync-deploy` plugin. + +## Frontend integration + +You can interact with your contracts using the `zksync-ethers` Javascript library. +This SDK has been built on top of ethers and uses the same classes (`Provider`, `Contract`, `Wallet`) so in a lot of cases, +you just need to import these classes from `zksync-ethers` instead of `ethers`: + +```ts +//import { utils, Provider, Contract, Wallet } from "ethers"; +import { utils, Provider, Contract, Wallet } from "zksync-ethers"; +``` + +You also need to use the `contract ABI` from the `artifacts-zk` folder to instantiate contracts. + +Apart from the same classes and methods provided by ethers, zksync-ethers includes additional methods for zksync-specific features. + +You can read more in the [`zksync-ethers` documentation](https://docs.zksync.io/sdk/js/ethers/v6/getting-started). + +## Verify contracts + +To verify your contracts you have two options: + +<!-- TODO: update link --> +<!-- - Explorer: verify your contracts manually in the [zkSync explorer](../block-explorer/contract-verification.md) --> +- Plugin: verify your contracts programmatically using the [Hardhat verify plugin](/build/tooling/hardhat/hardhat-zksync-verify) + +If you have any problems migrating your project, [send us a message on Discord](https://join.zksync.dev/). diff --git a/content/00.build/40.tooling/20.hardhat/30.compiling-libraries.md b/content/00.build/40.tooling/20.hardhat/30.compiling-libraries.md new file mode 100644 index 00000000..f6bbcf81 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/30.compiling-libraries.md @@ -0,0 +1,156 @@ +--- +title: Compiling non-inlinable libraries +description: Learn how to handle compiling non-inlinable libraries. +--- + +Solidity libraries can be divided into two categories: + +- _Inlinable_. The ones that contain only `private` or `internal` methods. 
+Since they can never be called from outside, the Solidity compiler inlines them,
+i.e. does not use external calls to access the library methods and uses the code of these libraries as part of the code that uses them.
+
+- _Non-inlinable_. The ones that have at least one `public` or `external` method.
+While they may be inlined by the Solidity compiler, they are not inlined when compiled to Yul representation.
+Since Yul is an intermediate step when compiling to %%zk_zkevm_label%% bytecode, this means that these libraries cannot be inlined by the zkSync compiler.
+
+**Libraries with public methods must be deployed separately, and their addresses should be passed as arguments when compiling the main contract.**
+Usage of the methods of this library will be replaced with calls to its address.
+
+## OpenZeppelin utility libraries
+
+Please note that the vast majority of the OpenZeppelin utility libraries _are_ inlinable.
+That means that _there is no need to do any further actions to make them compile_.
+
+This section describes the compilation of non-inlinable libraries only.
+
+## Example
+
+Let's say that we have a small library that calculates the square of a number:
+
+```solidity
+pragma solidity ^0.8.0;
+
+library MiniMath {
+    function square(uint256 x) public pure returns (uint256) {
+        return x*x;
+    }
+}
+```
+
+And there is a smart contract that uses this library:
+
+```solidity
+pragma solidity ^0.8.0;
+
+import "./MiniMath.sol";
+
+contract Main {
+    uint256 public lastNumber;
+
+    function storeSquare(uint256 x) public {
+        uint256 square = MiniMath.square(x);
+        lastNumber = square;
+    }
+}
+```
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+**Support for missing libraries in hardhat-zksync-solc ^0.4.2**:
+Version 0.4.2 introduced a mode that detects non-inlinable libraries that are missing and that are required for the compilation of contracts.
+::
+
+If you try to create a project with these two files following the guidelines from the
+[getting started](getting-started) guide, the `yarn hardhat compile` command will fail.
+
+#### Using hardhat-zksync-solc version >= 0.4.2
+
+The following error is displayed:
+
+<!-- TODO: check that the links are up-to-date.
+Message is using era.zksync.io -->
+```sh
+zksolc compiler detected missing libraries! For more details, visit: https://era.zksync.io/docs/tools/hardhat/compiling-libraries.html.
+To compile and deploy libraries, please run: `yarn hardhat deploy-zksync:libraries`
+For more details on how to use deploy-zksync:libraries task from hardhat-zksync-deploy plugin, visit: https://era.zksync.io/docs/tools/hardhat/hardhat-zksync-deploy.html.
+```
+
+To address the error, you can follow the instructions provided in the output, which is the recommended approach.
+For more details, please refer to [this section](#automatic-deployment).
+Alternatively, if you prefer a manual resolution for the libraries, you can find detailed instructions in [this section](#manual-deployment).
+
+Choose the method that best suits your preferences or requirements.
+
+## Non-inline libraries deployment
+
+### Automatic deployment
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+This approach is effective only with specific plugin versions:
+
+- `hardhat-zksync-solc` >= 0.4.2
+- `hardhat-zksync-deploy` >= 0.6.5
+
+Make sure that you are using the specified versions or later versions to ensure compatibility with the described resolution method.
+::
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+Vyper does not support automatic deployment of missing libraries, and the process needs to be handled manually.
+::
+
+`hardhat-zksync-deploy` plugin has the capability to automatically deploy all missing libraries generated by the compiler.
+For additional information, you may refer to the [documentation](hardhat-zksync-deploy#compilation-and-deployment-support-for-missing-libraries). +This documentation provides details on how the tool handles the compilation and deployment of libraries that are currently missing. + +### Manual deployment + +To resolve the issue, you need to create _a separate project_, where only the library file will be located. +After deploying _only_ the library to zkSync Era, you should get the address of the deployed library and pass it to the compiler settings. +The process of deploying the library is the same as deploying a smart contract. +You can learn how to deploy smart contracts on zkSync Era in the [getting started](getting-started#compile-and-deploy-a-contract) guide. + +Let's say that the address of the deployed library is `0xF9702469Dfb84A9aC171E284F71615bd3D3f1EdC`. +To pass this address to the compiler parameters, open the `hardhat.config.ts` file of the project where the `Main` contract is located +and add the `libraries` section in the `zksolc` plugin properties: + +```typescript +import "@matterlabs/hardhat-zksync-deploy"; +import "@matterlabs/hardhat-zksync-solc"; + +module.exports = { + zksolc: { + version: "latest", // Uses latest available in %%zk_git_repo_zksolc-bin%% + settings: { + libraries: { + "contracts/MiniMath.sol": { + MiniMath: "0xF9702469Dfb84A9aC171E284F71615bd3D3f1EdC", + }, + }, + }, + }, + defaultNetwork: "zkTestnet", + networks: { + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // URL of the zkSync network RPC + ethNetwork: "%%zk_testnet_identifier%%", // Can also be the RPC URL of the Ethereum network (e.g. 
`https://sepolia.infura.io/v3/<API_KEY>`) + zksync: true, + }, + }, + solidity: { + version: "0.8.13", + }, +}; +``` + +The address of the library is passed in the following lines: + +```typescript +libraries: { + 'contracts/MiniMath.sol': { + 'MiniMath': '0xF9702469Dfb84A9aC171E284F71615bd3D3f1EdC' + } +}, +``` + +where `'contracts/MiniMath.sol'` is the location of the library's Solidity file and `MiniMath` is the name of the library. + +Now, running `yarn hardhat compile` should successfully compile the `Main` contract. diff --git a/content/00.build/40.tooling/20.hardhat/35.hardhat-zksync.md b/content/00.build/40.tooling/20.hardhat/35.hardhat-zksync.md new file mode 100644 index 00000000..32fe822b --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/35.hardhat-zksync.md @@ -0,0 +1,73 @@ +--- +title: hardhat-zksync +description: +--- + +The hardhat-zksync plugin provides a convenient method for bundling and accessing a range of zkSync-related Hardhat plugins. +This approach simplifies the process of utilizing these plugins and promotes ease of use. + +List of contained plugins: + +- [hardhat-zksync-solc](hardhat-zksync-solc) +- [hardhat-zksync-deploy](hardhat-zksync-deploy) +- [hardhat-zksync-upgradable](hardhat-zksync-upgradable) +- [hardhat-zksync-verify](hardhat-zksync-verify) +- [hardhat-zksync-node](hardhat-zksync-node) +- [hardhat-zksync-ethers](hardhat-zksync-ethers) + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Popular Hardhat plugins**: +You can find a list of all official plugins [here](getting-started). +Also, zkSync supports some other [popular plugins](other-plugins) that can be used. 
+::
+
+### Installation
+
+Add the latest version of this plugin to your project with the following command:
+
+::code-group
+
+```bash [yarn]
+yarn add -D @matterlabs/hardhat-zksync
+```
+
+```bash [npm]
+npm i -D @matterlabs/hardhat-zksync
+```
+
+```bash [bun]
+bun add @matterlabs/hardhat-zksync --dev
+```
+
+::
+
+### Usage
+
+After installing it, add the plugin to your Hardhat config:
+
+```javascript
+import "@matterlabs/hardhat-zksync";
+```
+
+With the `hardhat-zksync` plugin installed and imported, you will have access to all of the supported plugins
+and will be able to use them as needed in your project.
+This plugin enables access to all commands available for each specific plugin, making them readily accessible with just the usage of this plugin.
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+To learn more about using any of the plugins that are supported by the hardhat-zksync plugin, you can refer to their documentation above.
+::
+
+For certain tasks present in the plugins encompassed by this plugin, it overrides them with new features and parameters.
+These tasks streamline common functionalities into a simplified workflow.
+
+Here is a list of overridden tasks where this plugin adds a new optional parameter `--verify`:
+
+- `deploy-zksync:contract`
+- `deploy-zksync:proxy`
+- `upgrade-zksync:proxy`
+- `deploy-zksync:beacon`
+- `upgrade-zksync:beacon`
+
+The `--verify` parameter allows the task to immediately verify all deployed and upgraded contracts when the task is called.
+
+To check other parameters present in these tasks, please check the documentation pages for [hardhat-zksync-deploy](hardhat-zksync-deploy) and [hardhat-zksync-upgradable](hardhat-zksync-upgradable).
diff --git a/content/00.build/40.tooling/20.hardhat/40.hardhat-zksync-solc.md b/content/00.build/40.tooling/20.hardhat/40.hardhat-zksync-solc.md new file mode 100644 index 00000000..0cf786bd --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/40.hardhat-zksync-solc.md @@ -0,0 +1,231 @@ +--- +title: hardhat-zksync-solc +description: +--- + +This plugin is used to provide a convenient interface for compiling Solidity smart contracts before deploying them to zkSync Era. + +Learn more about the latest updates in the [changelog](%%zk_git_repo_hardhat-zksync%%/blob/main/packages/hardhat-zksync-solc/CHANGELOG.md). + +## Prerequisite + +To use the `hardhat-zksync-solc` in your project, we recommend that: + +- You have Node installed and `yarn` or `npm` package manager. + +## Installation + +[@matterlabs/hardhat-zksync-solc](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-solc) + +Add the latest version of this plugin to your project with the following command: + +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-solc +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-solc +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-solc --dev +``` + +:: + +## Configuration + +Import the package in the `hardhat.config.ts` file: + +```ts +import "@matterlabs/hardhat-zksync-solc"; +``` + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Default config in hardhat-zksync-solc ^0.4.0**: +Version 0.4.0 introduced a default configuration making all parameters optional. +You can override the default configuration in the `hardhat.config.ts` file. +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Support for missing libraries in hardhat-zksync-solc ^0.4.2**: +Version 0.4.2 introduced a mode that detects non-inlinable libraries that are missing and that are required for the compilation of contracts. 
+This feature works with the `hardhat-zksync-deploy` plugin, specifically the `deploy-zksync:libraries` task, +to compile and deploy the missing libraries. There are no new commands, just follow the instructions logged by the `yarn hardhat compile` output. + +Upon encountering missing non-inline libraries during the compilation process, the compiler logged steps to follow, while compilation is not valid. +This results with empty `artifacts-zk` and `cache-zk` folders. +:: + +Any configuration parameters should be added inside a `zksolc` property in the `hardhat.config.ts` file: + +```typescript +zksolc: { + version: "latest", // optional. + settings: { + compilerPath: "zksolc", // optional. Ignored for compilerSource "docker". Can be used if compiler is located in a specific folder + libraries:{}, // optional. References to non-inlinable libraries + missingLibrariesPath: "./.zksolc-libraries-cache/missingLibraryDependencies.json", // optional. This path serves as a cache that stores all the libraries that are missing or have dependencies on other libraries. A `hardhat-zksync-deploy` plugin uses this cache later to compile and deploy the libraries, especially when the `deploy-zksync:libraries` task is executed + isSystem: false, // optional. Enables Yul instructions available only for zkSync system contracts and libraries + forceEvmla: false, // optional. Falls back to EVM legacy assembly if there is a bug with Yul + optimizer: { + enabled: true, // optional. True by default + mode: '3', // optional. 3 by default, z to optimize bytecode size + fallback_to_optimizing_for_size: false, // optional. Try to recompile with optimizer mode "z" if the bytecode is too large + }, + experimental: { + dockerImage: '', // deprecated + tag: '' // deprecated + }, + contractsToCompile: [] //optional. 
Compile only specific contracts
+  }
+},
+
+```
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+Compilers are no longer released as Docker images and their usage is no longer recommended.
+::
+
+- `version` is the `zksolc` compiler version.
+  Compiler versions can be found in [the following repository](%%zk_git_repo_zksolc-bin%%).
+- `compilerSource` indicates the compiler source and can be either `binary` (default) or `docker` (deprecated).
+If there isn't a compiler binary already installed, the plugin will automatically download it.
+- `compilerPath` (optional) is a field with the path to the `zksolc` binary. By default, the binary in `$PATH` is used.
+- `libraries` if your contract uses non-inlinable libraries as dependencies, they have to be defined here.
+Learn more about [compiling libraries here](compiling-libraries).
+- `missingLibrariesPath` (optional) serves as a cache that stores all the libraries that are missing or have dependencies on other libraries.
+A `hardhat-zksync-deploy` plugin uses this cache later to compile and deploy the libraries,
+especially when the `deploy-zksync:libraries` task is executed.
+Defaults to `./.zksolc-libraries-cache/missingLibraryDependencies.json`.
+- `isSystem` - required if contracts use Yul instructions available only for zkSync system contracts and libraries.
+- `forceEvmla` - falls back to EVM legacy assembly if there is an issue with the Yul IR compilation pipeline.
+- `optimizer` - Compiler optimizations:
+  - `enabled`: `true` (default) or `false`.
+  - `mode`: `3` (default) recommended for most projects. Mode `z` reduces bytecode size for large projects that make heavy use of `keccak` and far calls.
+  - `fallback_to_optimizing_for_size` (optional) indicates that the compiler will try to recompile with optimizer mode "z" if the bytecode is too large.
+- `metadata`: Metadata settings. If the option is omitted, the metadata hash is appended by default:
+  - `bytecodeHash`: Can only be `none`. 
It removes metadata hash from the bytecode. +- `dockerImage` and `tag` are deprecated options used to identify the name of the compiler docker image. +- `contractsToCompile` (optional) field is utilized to compile only the specified contracts. +The contract names do not necessarily need to be written in full qualified form. +The plugin will perform an include operation, attempting to match the provided contract names. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**forceEvmla usage** + +Setting the `forceEvmla` field to true can have the following negative impacts: + +- No support for recursion. +- No support for internal function pointers. +- Possible contract size and performance impact. + +For Solidity versions older than 0.8, only this compilation mode is available and it is used by default. +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +`fallback_to_optimizing_for_size` option is supported for zksolc compiler version 1.3.21 or higher. +:: + +### Compiler informations + +The zksolc compilers are stored in the cache folder with the path `{cache}/hardhat-nodejs/compilers-v2/zksolc`. +In this location, you can inspect the locally stored compiler versions. + +`{cache}` is a placeholder for a path that is resolved by Hardhat + +The `compilerVersion.json` file is used by the plugin to get the latest available version and the minimum required compiler version. +This file undergoes invalidation every 24 hours (currently), subsequently being updated with fresh information. +This approach is implemented to provide a caching mechanism, avoiding the risk of encountering GitHub throttling issues during fetching new releases. 
+ +### zkSync Era Solidity compiler + +Due to [the identified limitations](/zk-stack/components/compiler/toolchain/solidity#limitations) +of the [upstream Solidity compiler](https://github.com/ethereum/solidity), +our team has developed [a new edition](%%zk_git_repo_era-solidity%%) +of the compiler, which effectively addresses and resolves these constraints. + +For usage of EraVM compiler, `eraVersion` should be added inside `solidity` property in the `hardhat.config.ts` file: + +```typescript +solidity: { + version: "0.8.17", + eraVersion: "1.0.0" //optional. Compile contracts with EraVM compiler +}, + +``` + +- `eraVersion` - (optional) field used to specify version of EraVM compiler + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**eraVersion usage** + +Using latest as the field value is not supported. Instead, the eraVersion field must be filled with a specific version. +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**warning EraVM compiler usage** + +To use the EraVM compiler, the zksolc compiler version must be equal to or greater than 1.3.22. +:: + +### Network configuration + +Configure the `zksync` parameter in the networks to enable the zksolc compiler: + +```ts +defaultNetwork: "zkSyncTestnet", +networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>", // The Ethereum Web3 RPC URL (optional). + zksync: false, // disables zksolc compiler + }, + zkSyncTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, // enables zksolc compiler + } +}, +``` + +- `zksync` network option indicates whether zksolc is enabled on a certain network. `false` by default. +Useful for multichain projects in which you can enable `zksync` only for specific networks. 
+ +## Commands + +::code-group + +```bash [yarn] +yarn hardhat compile +``` + +```bash [npm] +npx hardhat compile +``` + +:: + +Compiles all the smart contracts in the `contracts` directory and creates the `artifacts-zk` folder with all the compilation artifacts, +including factory dependencies for the contracts, which could be used for contract deployment. + +To understand what the factory dependencies are, read more about them +[here](/build/developer-reference/ethereum-differences/contract-deployment#note-on-factory-deps) documentation. + +## Troubleshooting + +#### Error in plugin @matterlabs/hardhat-zksync-solc: Invalid zksolc compiler version + +This error is returned when the version defined in the `hardhat.config.ts` file is lower +than the minimal required (versions are defined in file [compilerVersion.json](#compiler-informations)). +Update the version to solve the issue. + +#### Why is there an `unexpected end of JSON input` compilation error? + +This is an error that is usually thrown when compiling a large smart contract codebase. + +If you encounter such an error, please do the following: + +- Update the `@matterlabs/hardhat-zksync-solc` library and try to re-compile the smart contracts afterwards. +- If after the recompilation you get the `Library not found` error, then you should follow the instructions from [here](compiling-libraries). diff --git a/content/00.build/40.tooling/20.hardhat/50.hardhat-zksync-vyper.md b/content/00.build/40.tooling/20.hardhat/50.hardhat-zksync-vyper.md new file mode 100644 index 00000000..b210ec04 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/50.hardhat-zksync-vyper.md @@ -0,0 +1,100 @@ +--- +title: hardhat-zksync-vyper +description: +--- + +The [@matterlabs/hardhat-zksync-vyper](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-vyper) plugin +provides an interface for compiling Vyper smart contracts before deploying them to zkSync Era. 
+ +Learn more about the latest updates in the [changelog](%%zk_git_repo_hardhat-zksync%%/blob/main/packages/hardhat-zksync-vyper/CHANGELOG.md). + +## Prerequisite + +To use the `hardhat-zksync-vyper` in your project, we recommend that: + +- You have Node installed and `yarn` or `npm` package manager. + +## Installation + +[@matterlabs/hardhat-zksync-vyper](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-vyper) + +Add the latest version of this plugin to your project with the following command: + +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-vyper @nomiclabs/hardhat-vyper +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-vyper +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-vyper --dev +``` + +:: + +## Configuration + +::callout{icon="i-heroicons-information-circle" color="blue"} +`hardhat-zksync-vyper` v0.2.0 introduced a default configuration so all parameters are optional. +:: + +Any configuration parameters should be added inside a `zkvyper` property in the `hardhat.config.ts` file: + +```ts +zkvyper: { + version: "latest", // Uses latest available in %%zk_git_repo_zkvyper-bin%% + settings: { + // compilerPath: "zkvyper", // optional field with the path to the `zkvyper` binary. + libraries: {}, // optional. References to non-inlinable libraries + optimizer: { + mode: '3' // optional. 3 by default, z to optimize bytecode size + fallback_to_optimizing_for_size: false, // optional. Try to recompile with optimizer mode "z" if the bytecode is too large + }, + experimental: { + dockerImage: '', // deprecated + tag: '' // deprecated + }, + }, + } +``` + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Compilers are no longer released as Docker images and its usage is no longer recommended. +:: + +- `version`: The `zkvyper` compiler version. Default value is `latest`. +Find the latest compiler versions in the [zkvyper repo](%%zk_git_repo_zkvyper-bin%%). 
+- `compilerSource`: Indicates the compiler source and can be either `binary`. (A `docker` option is no longer recommended). +If there is no previous installation, the plugin automatically downloads one. +- `optimizer` - Compiler optimizations: + - `mode`: `3` (default) recommended for most projects. Mode `z` reduces bytecode size for large projects that make heavy use of `keccak` and far calls. + - `fallback_to_optimizing_for_size` (optional) indicates that the compiler will try to recompile with optimizer mode "z" if the bytecode is too large. +- `compilerPath`: Optional field with the path to the `zkvyper` binary. By default, the binary in `$PATH` is used. +- `libraries`: Define any non-inlinable libraries your contracts use as dependencies here. Learn more about [compiling libraries](compiling-libraries). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +`fallback_to_optimizing_for_size` option is supported for zkvyper compiler version 1.3.15 or higher. +:: + +## Commands + +::code-group + +```bash [yarn] +yarn hardhat compile + +``` + +```bash [npm] +npx hardhat compile + +``` + +:: + +Compiles all the smart contracts in the `contracts` directory and creates the `artifacts-zk` folder with all the compilation artifacts, +including factory dependencies for the contracts, which could be used for contract deployment. diff --git a/content/00.build/40.tooling/20.hardhat/60.hardhat-zksync-deploy.md b/content/00.build/40.tooling/20.hardhat/60.hardhat-zksync-deploy.md new file mode 100644 index 00000000..c521496c --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/60.hardhat-zksync-deploy.md @@ -0,0 +1,681 @@ +--- +title: hardhat-zksync-deploy +description: +--- + +This plugin provides utilities for deploying smart contracts on zkSync Era with artifacts built by the `@matterlabs/hardhat-zksync-solc` +or `@matterlabs/hardhat-zksync-vyper` plugins. 
+ +## Prerequisite + +To use the `hardhat-zksync-deploy` in your project, we recommend that: + +- You have a Node installation and `yarn` or `npm` package manager. +- You are already familiar with deploying smart contracts on zkSync Era. + +If not, please refer to the first section of the [zkSync 101 material](/build/zksync-101/hello-zksync). + +- A wallet with sufficient Sepolia `ETH` on Ethereum and %%zk_testnet_name%% to pay for deploying smart contracts on testnet. + +You can get Sepolia ETH from the [network faucets](/ecosystem/network-faucets). + +- Get testnet `ETH` for zkSync Era using [bridges](https://zksync.io/explore#bridges) to bridge funds to zkSync. +- You know [how to get your private key from your MetaMask wallet](https://support.metamask.io/hc/en-us/articles/360015289632-How-to-export-an-account-s-private-key). + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Local zkSync Testing with zksync-cli**: +Skip the hassle for test ETH by using `zksync-cli` for local testing. +Simply execute `npx zksync-cli dev start` to initialize a local zkSync development environment, which includes local Ethereum and zkSync nodes. +This method allows you to test contracts without requesting external testnet funds. +Explore more in the [zksync-cli documentation](/build/tooling/zksync-cli). +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +#### Version Compatibility Warning + +Ensure you are using the correct version of the plugin with ethers: + +- For plugin version **<1.0.0**: + + - Compatible with ethers **v5**. 
+ +- For plugin version **≥1.0.0**: + + - Compatible with ethers **v6** (⭐ Recommended) +:: + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Deployer extension inside Hardhat Runtime Environment (HRE)**: +To use new features like the deployer extension inside Hardhat Runtime Environment (HRE), +caching mechanism, and support for script paths, tags, dependencies, and priority, the plugin versions should be as follows: + +- For **v6**, the version should be **1.2.0** or higher. +- For **v5**, the version should be **0.8.0** or higher. +:: + +## Setup + +[@matterlabs/hardhat-zksync-deploy](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-deploy) + +Add the latest version of this plugin to your project with the following command: + +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-deploy ethers zksync-ethers +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-deploy +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-deploy --dev +``` + +:: + +## Network Configuration + +In the `hardhat.config.ts` file, specify zkSync Era and Ethereum networks in the `networks` object. + +```typescript +networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). + }, + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true + } +}, +// defaultNetwork: "zkTestnet", // optional (if not set, use '--network zkTestnet') +``` + +- `zkTestnet` is an arbitrary zkSync Era network name. You can select this as the default network using the `defaultNetwork` property. +- `url` is a field containing the URL of the zkSync Era node in case of the zkSync Era network (with `zksync` flag set to `true`), +or the URL of the Ethereum node. 
This field is required for all zkSync Era and Ethereum networks used by this plugin. +- `ethNetwork` is a field with the URL of the Ethereum node. +You can also provide network name (e.g. `%%zk_testnet_identifier%%`) as the value of this field. +In this case, the plugin will either use the URL of the appropriate Ethereum network configuration (from the `networks` section), +or the default `ethers` provider for the network if the configuration is not provided. +This field is required for all zkSync networks used by this plugin. +- `zksync` is a flag that indicates if the network is zkSync Era. This field needs to be set to `true` for all zkSync Era networks; it is `false` by default. + +## Usage in deployment scripts + +### `Deployer` export + +The main export of this plugin is the `Deployer` class. It is used to wrap a `zksync-ethers` Wallet instance +and provides a convenient interface to deploy smart contracts and account abstractions. It's main methods are: + +```typescript +class Deployer { + + /** + * @param hre Hardhat runtime environment. This object is provided to scripts by hardhat itself. + * @param zkWallet The wallet which will be used to deploy the contracts. + * @param deploymentType Optional deployment type that relates to the ContractDeployer system contract function to be called. Defaults to deploying regular smart contracts. + */ + constructor(hre: HardhatRuntimeEnvironment, zkWallet: zk.Wallet, deploymentType?: zk.types.DeploymentType) + + /** + * Created a `Deployer` object on ethers.Wallet object. + * + * @param hre Hardhat runtime environment. This object is provided to scripts by hardhat itself. + * @param ethWallet The wallet used to deploy smart contracts. + * @param deploymentType The optional deployment type that relates to the `ContractDeployer` system contract function to be called. Defaults to deploying regular smart contracts. 
+ */ + static fromEthWallet(hre: HardhatRuntimeEnvironment, ethWallet: ethers.Wallet, deploymentType?: zk.types.DeploymentType) + + /** + * Loads an artifact and verifies that it was compiled by `zksolc`. + * + * @param contractNameOrFullyQualifiedName The name of the contract. + * It can be a bare contract name (e.g. "Token") if it's + * unique in your project, or a fully qualified contract name + * (e.g. "contract/token.sol:Token") otherwise. + * + * @throws Throws an error if a non-unique contract name is used, + * indicating which fully qualified names can be used instead. + * + * @throws Throws an error if an artifact was not compiled by `zksolc`. + */ + public async loadArtifact( + contractNameOrFullyQualifiedName: string + ): Promise<ZkSyncArtifact> + + /** + * Estimates the price of calling a deploy transaction in a certain fee token. + * + * @param artifact The previously loaded artifact object. + * @param constructorArguments The list of arguments to be passed to the contract constructor. + * + * @returns Calculated fee in ETH wei. + */ + public async estimateDeployFee( + artifact: ZkSyncArtifact, + constructorArguments: any[] + ): Promise<bigint> + + /** + * Sends a deploy transaction to the zkSync network. + * For now it uses default values for the transaction parameters: + * + * @param contractNameOrArtifact The previously loaded artifact object, or contract name that will be resolved to artifact in the background. + * @param constructorArguments The list of arguments to be passed to the contract constructor. + * @param overrides Optional object with additional deploy transaction parameters. + * @param additionalFactoryDeps Additional contract bytecodes to be added to the factory dependencies list. + * The fee amount is requested automatically from the zkSync Era server. + * + * @returns A contract object. 
+ */ + public async deploy( + contractNameOrArtifact: ZkSyncArtifact | string, + constructorArguments: any[], + overrides?: Overrides, + additionalFactoryDeps?: ethers.BytesLike[], + ): Promise<zk.Contract> +``` + +To see an example script of how to use a `Deployer` class to deploy a contract, check out the [deployment section of the quickstart](getting-started#compile-and-deploy-a-contract). + +::callout{icon="i-heroicons-information-circle" color="blue"} +**contractNameOrArtifact parameter within the deploy method** + +In the method description, it's evident that `contractNameOrArtifact` can accept two types of objects. +One type represents a loaded artifact, while the other type is a string representing a contract name, +which the `deploy` method will internally convert to the corresponding artifact. + +```typescript +const wallet = new zk.Wallet("PRIVATE_KEY"); +const deployer = new Deployer(hre, zkWallet); +............ +// Provided previously loaded artifact +const artifact = await deployer.loadArtifact("ContractName"); +const contract = await deployer.deploy(artifact); +// Provided contract name +const contract = await deployer.deploy("ContractName"); +``` + +:: + +### Environment extensions + +The plugin adds a deployer extension object to the Hardhat Runtime Environment (HRE), which allows us to access it using `hre.deployer`. + +### Configuration + +To extend the configuration to support automatic deployment inside scripts without the need for manually creating a wallet, +you can add an `accounts` field to the specific network configuration in the `networks` section of the `hardhat.config.ts` file. +This accounts field can support an array of private keys or a mnemonic object. + +If the `accounts` section contains an array of private keys, the deploy method will use index `0` by default +unless another wallet is explicitly set in the script. 
+ +```typescript +const config: HardhatUserConfig = { + networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). + }, + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, + // ADDITION + accounts: ['0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3', '0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959'] // The private keys for the accounts used in the deployment process. + accounts: { + mnemonic: 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle' + } + // Mnemonic used in the deployment process + } + }, +}; +``` + +- `accounts` represents a list of the private keys or mnemonic object for the account used in the deployment process. + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Accounts on zkSync Era Test Node or zksync-cli Local Node**: +`accounts` object will be automatically be populated with rich accounts if used network is zkSync Era Test Node or zksync-cli Local Node + +:: + +To establish a default index per network, which is by default `0`, you can include a `deployerAccounts` section in your `hardhat.config.ts` file. +This enables the plugin to utilize the designated default indexes when accessing `deploy` method in deployment scripts, +thereby granting greater control over the selection of the deployment account for each network. + +```typescript +const config: HardhatUserConfig = { + // ADDITION + deployerAccounts: { + 'zkTestnet': 1, // The default index of the account for the specified network. + //default: 0 // The default value for not specified networks. Automatically set by plugin to the index 0. + }, + networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). 
+    },
+    zkTestnet: {
+      url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network.
+      ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`)
+      zksync: true,
+      accounts: ['0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3', '0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959'] // The private keys for the accounts used in the deployment process.
+      accounts: {
+        mnemonic: 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle'
+      } // Mnemonic used in the deployment process
+    }
+  },
+};
+```
+
+- `deployerAccounts` represents an object where the default index of the accounts is provided and automatically used in the deployment scripts.
+If the network name is not specified inside the object, the default index of the account will be `0`.
+We can change the default index for networks that are not specified by overriding the `default` name with the index that we want.
+
+The described objects work together to provide users with a better deployment experience, eliminating the need for manual wallet initialization.
+
+### Methods
+
+Methods available for use in `hre.deployer` are the same as those available in the `Deployer` class object, as described [here.](#deployer-export)
+Additionally, `hre.deployer` is extended with specific methods to facilitate the deployment process, making it more straightforward.
+
+```typescript
+  /**
+   * Set deployment type
+   *
+   * @param deployment type for further deployment actions
+   *
+   */
+  public setDeploymentType(
+    deploymentType: zk.types.DeploymentType
+  ): void
+
+  /**
+   * Set a new Wallet
+   *
+   * @param wallet object to be used in further deployment actions
+   *
+   */
+  public setWallet(
+    wallet: zk.Wallet
+  ): void
+
+  /**
+   * Returns a new Wallet connected to the selected network
+   *
+   * @param privateKeyOrAccountNumber Optional private key or index of the account
+   *
+   * @returns A wallet object. 
If param is not provided, default wallet will be returned. + */ + public async getWallet( + privateKeyOrAccountNumber?: string | number + ): Promise<zk.Wallet> +``` + +### Transition from `Deployer` object to the `hre.deployer` + +The deployment logic remains the same, but instead of instantiating a `Deployer` class, +you directly access the deployer object provided by `hre.deployer`. +This simplifies the deployment process and enhances the developer experience. + +```typescript +// Using Deploy exports for the deployment +const wallet = new zk.Wallet("PRIVATE_KEY"); +const deployer = new Deployer(hre, zkWallet); +const artifact = await deployer.loadArtifact("ContractName"); +const contract = await deployer.deploy(artifact, []); + +// Using hre.deployer with connected wallet provided by hardhat.config.ts configuration +const artifact = await hre.deployer.loadArtifact("ContractName"); +const contract = await hre.deployer.deploy(artifact, []); +``` + +### Usage of the getWallet and setWallet + +To simplify and improve the user experience, you can use the `getWallet` and `setWallet` methods provided by `hre.deployer` +to generate a new wallet for deployment if that is needed and to change current wallet. + +```typescript +// To get the wallet for index 2 of the network accounts object inside hardhat.config.ts +const wallet = await hre.deployer.getWallet(2); +// To get the wallet for the provided private key +const wallet = await hre.deployer.getWallet("0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959"); + +// Set a new wallet +hre.deployer.setWallet(wallet); + +const artifact = await hre.deployer.loadArtifact("ContractName"); +const contract = await hre.deployer.deploy(artifact, []); +``` + +## Caching mechanism + +The `hardhat-zksync-deploy` plugin supports a caching mechanism for contracts deployed on the same network, +and by default, this feature is enabled for every deployment with specific network unless specified otherwise. 
+For each deployment within your project, a new `deployments-zk` folder is created.
+Inside this folder, you can find subfolders for each network specified in the `hardhat.config.ts` file.
+Each network folder contains JSON files named after deployed contracts where information used for caching purposes is stored,
+and additionally, a `.chainId` file contains the chainId specific to that network.
+
+To explicitly use a cache mechanism or force deploy for a specific network in your `hardhat.config.ts` file,
+you would indeed need to set the `forceDeploy` flag for that network in the networks section.
+
+```typescript
+const config: HardhatUserConfig = {
+  networks: {
+    sepolia: {
+      url: "https://sepolia.infura.io/v3/<API_KEY>", // The Ethereum Web3 RPC URL (optional).
+    },
+    zkTestnet: {
+      url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network.
+      ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`)
+      zksync: true,
+      // ADDITION
+      forceDeploy: true, // Specifies whether the deploy process will use the cache mechanism or force deploy the contracts
+    },
+  },
+};
+```
+
+If the `forceDeploy` flag is set to `true` for a specific network in your hardhat.config.ts file,
+it indicates that the deployment process will force deploy contracts to that network, bypassing any cache mechanism.
+
+Conversely, if the `forceDeploy` flag is set to `false` or not specified for a network,
+`hardhat-zksync-deploy` will use the caching mechanism during deployment.
+This means it will check whether the contracts have changed since the last deployment,
+and if not, it will reuse the already deployed contracts instead of redeploying them.
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+**default value for forceDeploy**: If a value isn't explicitly defined, it automatically defaults to `true`. 
+:: + +## Scripts configuration + +Scripts used for deployments have additional features that can provide the better experience and efficiency of the deployment process. + +### Deployment scripts path + +Configuring a scripts path can be achieved in two ways: + +- setting up global paths used for all networks. +- configuring network-specific paths used exclusively for each network. This kind of paths overrides a global paths. + +#### Global Deployment Paths + +To enable the plugin's usage of global custom deploy scripts, specify the directory path +containing these scripts within the `deployPaths` section nested inside the `paths` section of your `hardhat.config.ts` file. + +```typescript +const config: HardhatUserConfig = { + // ADDITION + paths: { + deployPaths: "deploy-zksync", //single deployment directory + deployPaths: ["deploy", "deploy-zksync"], //multiple deployment directories + } + networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). + }, + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, + } + }, +} +``` + +- `deployPaths` Specify deployment directories, you can use either a single object or an array structure. + +::callout{icon="i-heroicons-information-circle" color="blue"} +The default path, if not explicitly set, is the `deploy` folder inside the project's root directory. +:: + +#### Network-Specific Deployment Paths + +To configure network-specific paths, the `hardhat.config.ts` configuration needs to be extended +with a `deployPaths` property inside the network object inside `networks` section. 
+
+```typescript
+const config: HardhatUserConfig = {
+  networks: {
+    sepolia: {
+      url: "https://sepolia.infura.io/v3/<API_KEY>",
+    },
+    zkTestnet: {
+      url: "%%zk_testnet_rpc_url%%",
+      ethNetwork: "%%zk_testnet_identifier%%",
+      // ADDITION
+      deployPaths: "deploy-zksync", //single deployment directory
+      deployPaths: ["deploy", "deploy-zksync"], //multiple deployment directories
+      zksync: true,
+    },
+  },
+};
+```
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+Network-specific paths will override the global path, ensuring that only scripts within the directories configured for the specific network are executed.
+::
+
+### Deployment scripts, tags, dependencies and priority
+
+Deployment scripts can be tagged, allowing for easy categorization and organization.
+Dependencies between scripts can be specified to ensure proper execution order,
+and priority levels can be assigned to determine the sequence in which scripts are run.
+
+- `tags` An array of strings representing labels that can be assigned to scripts for categorization and organization.
+- `dependencies` An array of script tags specifying the dependencies of a script, ensuring proper execution order based on their dependencies.
+- `priority` An integer value indicating the priority level of a script, determining the sequence in which scripts are executed.
+If a script has a higher value for the priority field, it will be executed first unless it depends on another script.
+
+Examples:
+
+```typescript
+// Script 001_deploy.ts
+import { HardhatRuntimeEnvironment } from "hardhat/types";
+
+const deployScript = async function (_: HardhatRuntimeEnvironment) {
+  console.log("Deploy script");
+};
+
+export default deployScript;
+deployScript.priority = 800;
+deployScript.tags = ["first"];
+deployScript.dependencies = ["second"];
+
+// Script 002_deploy.ts
+import { HardhatRuntimeEnvironment } from "hardhat/types";
+
+const deployScript = async function (_: HardhatRuntimeEnvironment) {
+  console.log("Deploy script");
+};
+
+export default deployScript;
+deployScript.priority = 650;
+deployScript.tags = ["second"];
+
+// Script 003_deploy.ts
+import { HardhatRuntimeEnvironment } from "hardhat/types";
+
+const deployScript = async function (_: HardhatRuntimeEnvironment) {
+  console.log("Deploy script");
+};
+
+export default deployScript;
+deployScript.priority = 1000;
+```
+
+For the specific scripts, we observe that `001_deploy.ts` and `002_deploy.ts` are tagged,
+and `001_deploy.ts` depends on deployment scripts with the tag `second`.
+Additionally, a priority is set for all three scripts.
+As a result, when starting the deployment process (running scripts), the order of script execution will be as follows:
+
+1. `003_deploy.ts`: This script has the highest priority and is not dependent on any other script.
+1. `002_deploy.ts`: This script needs to be executed second because it is tagged with `second`, and `001_deploy.ts` depends on that script.
+1. `001_deploy.ts`: Although this script has a higher priority than `002_deploy.ts`, it depends on the latter, so it will be executed last.
+
+::callout{icon="i-heroicons-information-circle" color="blue"}
+The default value for **tags** is `default`, and the default value for **priority** is `0`.
+::
+
+## Compilation and Deployment Support for Missing Libraries
+
+This plugin facilitates the compilation and deployment of missing libraries for users.
+By leveraging the `@matterlabs/hardhat-zksync-solc` plugin, users can obtain a file that not only contains specifics +about the missing libraries for compilation but also showcases how they interlink with other dependent libraries. +For more information about missing libraries during the compilation process, please refer to [this link](compiling-libraries). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Starting from version 1.13.14, the zksolc compiler has been enhanced to identify missing libraries. +:: + +Complex library dependency tree is also supported. It ensures libraries are compiled and deployed in a structured manner, +starting from the most foundational library to the topmost dependent one. + +Example: + +```text +Consider three libraries where: + +- Library A is dependent on Library B +- Library B is dependent on Library C + +A +└── B + └── C + +Deployment workflow: +1. Compile and deploy Library C. +2. Compile and deploy Library B, referencing the deployed address of Library C. +3. Compile and deploy Library A, referencing the deployed address of Library B. +``` + +The feature to automatically update the Hardhat user configuration with deployed addresses of libraries in the `zksolc` object is supported as well. + +```ts +zksolc: { + compilerSource: 'binary', + settings: { + libraries: { + "contracts/LibraryA.sol": { + "LibraryA": "0x13706Afd344d905BB9Cb50752065a67Fa8d09c70" + }, + "contracts/LibraryB.sol": { + "LibraryB": "0x4cf2E778D384746EaB115b914885e2bB18E893E2" + } + } + } + }, + // If the settings and libraries don't exist, they'll be created. +``` + +For a step-by-step guide on how to deploy missing libraries, see the `deploy-zksync:libraries` command below. + +## Commands + +`yarn hardhat deploy-zksync` -- runs through all the scripts. + +- To run a specific script, add the `--script` argument, e.g. `hardhat deploy-zksync --script 001_deploy.ts`. Runs script with name `001_deploy.ts`. 
+- To run a scripts with specific tags add the `--tags` argument, e.g `hardhat deploy-zksync --tags all`. Run all scripts with tag `all`. +- To run on a specific zkSync Era network, use the standard Hardhat `--network` argument, e.g. `--network zkTestnet`. +The network with the name `zkTestnet` needs to be configured in the `hardhat.config.ts` file, +with all required fields stated above, or specify `defaultNetwork` in `hardhat.config.ts` file. + +::callout{icon="i-heroicons-information-circle" color="blue"} +If network argument `--network` or `defaultNetwork` configuration are not specified, +local setup with `http://localhost:8545` (Ethereum RPC URL) and `http://localhost:3050` (zkSync Era RPC URL), +will be used. In this case zkSync Era network will not need to be configured in `hardhat.config.ts` file. + +For more details about a dockerized local setup, check out [Local testing](/build/test-and-debug/dockerized-l1-l2-nodes). +:: + +`yarn hardhat deploy-zksync:contract --contract-name <contract name or FQN>` + +Provides an easy and fast way to deploy the given contract on the specified network. +If the provided command for deploying a single contract is insufficient and you require additional flexibility, +such as incorporating additional dependencies or overrides, it would be advisable to utilize a script-based approach. + +- `--contract-name <contract name or FQN>` - contract name or FQN, required argument in all tasks, e.g. `hardhat deploy-zksync:proxy --contract-name SomeContract`. +- `<constructor arguments>` - list of constructor arguments, e.g. `hardhat deploy-zksync:proxy --contract-name Greeter 'Hello'`. +- `--constructor-args <module name>` - name of javascript module containing complex constructor arguments. +Works only if `<constructor arguments>` are not provided +e.g. `hardhat deploy-zksync:contract --contract-name ComplexContract --constructor-args args.js`. 
+Example of `args.js` : + +```typescript +module.exports = [ + "a string argument", + "0xabcdef", + "42", + { + property1: "one", + property2: 2, + }, +]; +``` + +- `--no-compile`- skip the compilation process, e.g. `hardhat deploy-zksync:beacon --contract-name Contract --no-compile`. +- `--deployment-type` - specify which deployer smart contract function will be called. +Permissible values for this parameter include `create`, `create2`, `createAccount`, and `create2Account`. +If this parameter is omitted, the default value will be `create`, +e.g. `hardhat deploy-zksync:beacon --contract-name Greeter 'Hello' --deployment-type create2`. + +The account used for deployment will be the one specified by the `deployerAccount` configuration within the `hardhat.config.ts` file. +If no such configuration is present, the account with index `0` will be used. + +`yarn hardhat deploy-zksync:libraries` -- runs compilation and deployment of missing libraries +(the list of all missing libraries is provided by the output of `@matterlabs/hardhat-zksync-solc` plugin). + +The account used for deployment will be the one specified by the `deployerAccount` configuration within the `hardhat.config.ts` file. +If no such configuration is present, the account with index `0` will be used. + +- `--private-key-or-index <PRIVATE_KEY_OR_INDEX>` - An optional argument, libraries are deployed either using the provided private key +or by default using the account specified by its index in the accounts array for the specified network. +- `--no-auto-populate-config` - Flag which disables the auto-population of the hardhat config file. Enabled by default. +- `--external-config-object-path <file path>` - Specifies the path to the file containing the zksolc configuration. +If not set, it defaults to the Hardhat configuration file path. Works only if auto-population is enabled. 
+- `--exported-config-object <object name>` - Specifies the name of the user's Hardhat config object within the Hardhat configuration file. +Primarily for auto-population. Defaults to `config`. +- `--compile-all-contracts` - Compile all contracts with deployed libraries. Disabled by default. + +```bash +// Automatically using the account. +yarn hardhat deploy-zksync:libraries + +// Specifying a private key for the deployment. +yarn hardhat deploy-zksync:libraries --private-key-or-index 0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 + +// Specifying a accounts index for the deployment. +yarn hardhat deploy-zksync:libraries --private-key-or-index 2 +``` + +Example of using the `--exported-config-object <object name>` argument: + +```javascript +const someObject = { + zksolc: { + compilerSource: 'binary', + settings: { + }, + solidity: { + compilers: compilers, + }, + .... + }, +} + +module.exports = someObject; +``` + +```bash +yarn hardhat deploy-zksync:libraries --exported-config-object someObject +``` + +::callout{icon="i-heroicons-information-circle" color="blue"} +In Typescript projects `--exported-config-object <object name>` argument can be provided optionally. +Plugin will try to resolve config by `HardhatUserConfig` type. +:: diff --git a/content/00.build/40.tooling/20.hardhat/70.hardhat-zksync-upgradable.md b/content/00.build/40.tooling/20.hardhat/70.hardhat-zksync-upgradable.md new file mode 100644 index 00000000..c52bac1e --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/70.hardhat-zksync-upgradable.md @@ -0,0 +1,894 @@ +--- +title: hardhat-zksync-upgradable +description: +--- + +The `hardhat-zksync-upgradable` plugin is a Hardhat plugin that supports end-to-end pipelines +for deploying and updating upgradable smart contracts on the zkSync Era network. 
+ +The plugin is based on [@openzeppelin/upgrades-core](https://www.npmjs.com/package/@openzeppelin/upgrades-core) plugin +for deploying and managing upgradeable smart contracts on the Ethereum network. +The `hardhat-zksync-upgradable` plugin provides an easy-to-use interface for interacting with the +[OpenZeppelin Upgrades Plugins](https://docs.openzeppelin.com/upgrades-plugins) within a Hardhat environment on zkSync. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Ensure you are using the correct version of the plugin with ethers: + +- For plugin version **<1.0.0**: + + - Compatible with ethers **v5**. + +- For plugin version **≥1.0.0**: + - Compatible with ethers **v6** (⭐ Recommended) + +Examples are adopted for plugin version **>=1.0.0** +:: + +## Installation + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Current version of the upgradable plugin does not support the latest version of the `@openzeppelin/upgrades-core` package. +:: + +[@matterlabs/hardhat-zksync-upgradable](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-upgradable) + +Add the latest version of this plugin to your project with the following command: + +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-upgradable @openzeppelin/upgrades-core @openzeppelin/contracts-upgradeable@4.9.5 @openzeppelin/contracts@4.9.5 +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-upgradable @openzeppelin/contracts-upgradeable@4.9.5 @openzeppelin/contracts@4.9.5 +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-upgradable @openzeppelin/contracts-upgradeable@4.9.5 @openzeppelin/contracts@4.9.5 --dev +``` + +:: + +## Configuration + +After installing it, add the plugin to your `hardhat.config.ts` file: + +```typescript +import "@matterlabs/hardhat-zksync"; +// upgradable plugin +import "@matterlabs/hardhat-zksync-upgradable"; + +import { HardhatUserConfig } from "hardhat/config"; + +const config: HardhatUserConfig = { + zksolc: { 
+ version: "latest", + settings: {}, + }, + defaultNetwork: "zkSyncNetwork", + networks: { + ethNetwork: { + zksync: false, + url: "http://localhost:8545", + }, + zkSyncNetwork: { + zksync: true, + ethNetwork: "ethNetwork", + url: "http://localhost:3050", + }, + }, + solidity: { + version: "0.8.19", + }, +}; + +export default config; +``` + +## Deploying proxies + +The plugin supports three types of proxies: Transparent upgradable proxies, UUPS proxies, and beacon proxies. + +Upgradability methods are all part of the `zkUpgrades` property in the `HardhatRuntimeEnvironment` +and you only need to interact with it in order to deploy or upgrade your contracts. + +For the following examples, we use the simple `Box` smart contract: + +```typescript +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.16; +import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; + + +contract Box is Initializable{ + uint256 private value; + uint256 private secondValue; + uint256 private thirdValue; + + function initialize(uint256 initValue) public initializer { + value = initValue; + } + + // Reads the last stored value + function retrieve() public view returns (uint256) { + return value; + } + + // Stores a new value in the contract + function store(uint256 newValue) public { + value = newValue; + emit ValueChanged(newValue); + } + // Emitted when the stored value changes + event ValueChanged(uint256 newValue); + +} +``` + +In the examples below, we assume that the Box contract is compiled and its artifact loaded using +Deployer class from the [hardhat-zksync-deploy plugin](hardhat-zksync-deploy). +More info on how to compile and load the contract can be found in the [Hardhat getting started page](getting-started). + +## Transparent upgradable proxies + +Transparent upgradable proxies provide a way to upgrade a smart contract without changing its address +or requiring any change in the contract's interaction code. 
+With transparent proxies, a contract's address is owned by a proxy contract, which forwards all calls to the actual contract implementation. +When a new implementation is deployed, the proxy can be upgraded to point to the new implementation, +allowing for seamless upgrades without requiring changes to the contract's interaction code. + +To deploy a simple upgradable contract on zkSync Era local setup, first create a test wallet and add it to the new Deployer. + +```typescript +// mnemonic for local node rich wallet +const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle"; +const zkWallet = Wallet.fromMnemonic(testMnemonic); + +const deployer = new Deployer(hre, zkWallet); +``` + +After that, load the `Box` artifact and call the `deployProxy` method from the `zkUpgrades` hre property. + +```typescript +const contractName = "Box"; +const contract = await deployer.loadArtifact(contractName); +await hre.zkUpgrades.deployProxy(deployer.zkWallet, contract, [42], { initializer: "initialize" }); +``` + +The `deployProxy` method deploys your implementation contract on zkSync Era, deploys the proxy admin contract, and finally, deploys the transparent proxy. 
+ +### Full deploy proxy script + +```typescript +import { Deployer } from "@matterlabs/hardhat-zksync"; +import { Wallet } from "zksync-ethers"; + +import * as hre from "hardhat"; + +async function main() { + const contractName = "Box"; + console.log("Deploying " + contractName + "..."); + + // mnemonic for local node rich wallet + const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle"; + const zkWallet = Wallet.fromMnemonic(testMnemonic); + + const deployer = new Deployer(hre, zkWallet); + + const contract = await deployer.loadArtifact(contractName); + const box = await hre.zkUpgrades.deployProxy(deployer.zkWallet, contract, [42], { initializer: "initialize" }); + + await box.waitForDeployment(); + console.log(contractName + " deployed to:", await box.getAddress()); + + box.connect(zkWallet); + const value = await box.retrieve(); + console.log("Box value is: ", value); +} + +main().catch((error) => { + console.error(error); + process.exitCode = 1; +}); +``` + +Run the script with: + +::code-group + +```bash [yarn] +yarn hardhat run SCRIPT_FILE +``` + +```bash [npm] +npx hardhat run SCRIPT_FILE +``` + +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +- deployProxy method (and other deploy/upgrade methods from the zkUpgrades) needs to know which wallet to use to deploy smart contracts. +- For this reason, the wallet needs to have a configured provider that connects it to the specific zkSync network. +- This provider is configured in the hardhat config file, by stating the RPC url of the network to connect to. + +:: + +### Openzeppelin Version + +The plugin does not work with the latest versions due to a blocker on the `@matterlab/zksync-contracts` package. +The solution is to change the development dependencies to the previous version in your `package.json`. 
+
+```json
+  "@openzeppelin/contracts": "^4.9.5",
+  "@openzeppelin/contracts-upgradeable": "^4.9.5",
+```
+
+### Hardhat config
+
+```typescript
+defaultNetwork: 'zkSyncNetwork',
+  networks: {
+    sepolia: {
+      zksync: false,
+      url: 'http://localhost:8545',
+    },
+    zkSyncNetwork: {
+      zksync: true,
+      ethNetwork: 'sepolia',
+      url: 'http://localhost:3050',
+    },
+  },
+```
+
+Since the provider was instantiated on creating the `Deployer` class, based on your Hardhat configuration,
+we only have to pass the `deployer.zkWallet` and be sure that the correct provider is already set.
+
+On the other hand, if you need to explicitly set the provider, do that with the code below:
+
+```typescript
+  import { Provider } from "zksync-ethers";
+
+  const provider = new Provider("%%zk_testnet_rpc_url%%");
+
+  const testMnemonic = 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle';
+  // Wallet.connect returns a new, connected wallet instance; it does not mutate the wallet it is called on.
+  const zkWallet = Wallet.fromMnemonic(testMnemonic).connect(provider);
+
+  const deployer = new Deployer(hre, zkWallet);
+  ...
+```
+
+## UUPS proxies
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+If you want to use the plugin's UUPS proxy functionality, use zksolc version >=1.3.9.
+::
+
+The UUPS proxy pattern is similar to the transparent proxy pattern,
+except that the upgrade is triggered via the logic contract instead of from the proxy contract.
+
+For the UUPS deployment example, we use a slightly modified smart contract called `BoxUups`.
+
+```typescript
+
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.16;
+import '@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol';
+import '@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol';
+import '@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol';
+
+contract BoxUups is Initializable, UUPSUpgradeable, OwnableUpgradeable {
+    uint256 private value;
+    uint256 private secondValue;
+    uint256 private thirdValue;
+
+    function initialize(uint256 initValue) public initializer {
+        value = initValue;
+        __Ownable_init();
+        __UUPSUpgradeable_init();
+    }
+
+    // Reads the last stored value
+    function retrieve() public view returns (uint256) {
+        return value;
+    }
+
+    // Stores a new value in the contract
+    function store(uint256 newValue) public {
+        value = newValue;
+        emit ValueChanged(newValue);
+    }
+
+    function _authorizeUpgrade(address) internal override onlyOwner {}
+
+    // Emitted when the stored value changes
+    event ValueChanged(uint256 newValue);
+}
+```
+
+The main difference between the `Box` and `BoxUups` contracts is that the latter implements both `UUPSUpgradeable` and `OwnableUpgradeable` interfaces
+and has a special function `_authorizeUpgrade` which can only be called by the contract owner.
+
+You can find more info about how UUPS works in
+[OpenZeppelin's documentation](https://docs.openzeppelin.com/contracts/4.x/api/proxy#transparent-vs-uups).
+
+To deploy the UUPS contract, use the same script as for the transparent upgradable proxy.
+
+```typescript
+async function main() {
+  const contractName = 'BoxUups';
+  console.info(chalk.yellow('Deploying ' + contractName + '...'));
+
+  // mnemonic for local node rich wallet
+  const testMnemonic = 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle';
+  const zkWallet = Wallet.fromMnemonic(testMnemonic);
+  ...
+``` + +When you run the script, the plugin detects that the proxy type is UUPS, it executes the deployment, and saves the deployment info in your manifest file. + +## Beacon proxies + +Beacon proxies are a more advanced form of proxy that use an intermediate contract (called the Beacon contract) +to delegate calls to a specific implementation contract. + +Beacon proxies enable a more advanced upgrade pattern, where multiple implementation contracts can be deployed and "hot-swapped" on the fly +with no disruption to the contract's operation. + +This allows for more advanced upgrade patterns, such as adding or removing functionality while minimizing downtime. + +1. Start by creating a `Deployer` for the zkSync Era network and load the `Box` artifact: + + ```typescript + // mnemonic for local node rich wallet + const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle"; + const zkWallet = Wallet.fromMnemonic(testMnemonic); + + const deployer = new Deployer(hre, zkWallet); + + const contractName = "Box"; + const boxContract = await deployer.loadArtifact(contractName); + ``` + +1. Deploy the beacon contract using `deployBeacon` method from the `zkUpgrades` + + ```typescript + await hre.zkUpgrades.deployBeacon(deployer.zkWallet, boxContract); + ``` + +1. Use the `deployBeaconProxy` method which receives the zkSync Era wallet, beacon contract, and the implementation (Box) contract with its arguments. + + ```typescript + const box = await hre.zkUpgrades.deployBeaconProxy(deployer.zkWallet, beacon, boxContract, [42]); + ``` + +After that, your beacon proxy contract is deployed on the network, and you can interact with it. 
+ +### Full code for deploy beacon + +```typescript +import { Deployer } from "@matterlabs/hardhat-zksync"; +import { Wallet } from "zksync-ethers"; + +import * as hre from "hardhat"; + +async function main() { + const contractName = "Box"; + console.log("Deploying " + contractName + "..."); + // mnemonic for local node rich wallet + const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle"; + const zkWallet = Wallet.fromMnemonic(testMnemonic); + + const deployer = new Deployer(hre, zkWallet); + + const boxContract = await deployer.loadArtifact(contractName); + const beacon = await hre.zkUpgrades.deployBeacon(deployer.zkWallet, boxContract); + await beacon.waitForDeployment(); + console.log("Beacon deployed to:", await beacon.getAddress()); + + const box = await hre.zkUpgrades.deployBeaconProxy(deployer.zkWallet, await beacon.getAddress(), boxContract, [42]); + await box.waitForDeployment(); + console.log(contractName + " beacon proxy deployed to: ", await box.getAddress()); + + box.connect(zkWallet); + const value = await box.retrieve(); + console.log("Box value is: ", value); +} + +main().catch((error) => { + console.error(error); + process.exitCode = 1; +}); +``` + +Run the script with: + +::code-group + +```bash [yarn] +yarn hardhat run SCRIPT_FILE +``` + +```bash [npm] +npx hardhat run SCRIPT_FILE +``` + +:: + +## Implementation addresses check + +Once you deploy the proxy contract, all interactions with your implementation contract go through it. + +If you invoke the `deployProxy` function multiple times for a single implementation contract, +several proxies will be created, but the implementation contract will remain the same for all of them. +This means we can optimize the process to check for the existing implementation addresses before deploying a new proxy, +instead of deploying a new implementation contract every time. + +The upgradable plugin saves this information in the manifest file. 
This file will be created in your project's `.upgradable` folder. +The manifest file is created per network, meaning you will have different data saved for upgrading contracts on the local setup and zkSync Era networks. + +## Upgrading proxies + +### Validations + +In order for a smart contract implementation to be upgradable, it has to follow specific [rules](https://docs.openzeppelin.com/upgrades-plugins/1.x/writing-upgradeable). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +- The current version of the `hardhat-zksync-upgradable` plugin does **NOT** support all the validation checks. +- This means that it is the users responsibility to check if the new implementation they want to upgrade follows the predefined standards. +- At the time of writing, we are working on implementing those checks within the plugin itself, +and the plan for subsequent releases is to support them natively. + +:: + +### Upgradable examples + +The following examples use the `BoxV2` contract as a new implementation for the `Box` proxy. 
+ +```typescript +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.16; + +import "@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol"; + +contract BoxV2 is Initializable{ + uint256 private value; + + // Emitted when the stored value changes + event ValueChanged(uint256 newValue); + + function initialize(uint256 initValue) public initializer { + value = initValue; + } + + // Stores a new value in the contract + function store(uint256 newValue) public { + value = newValue; + emit ValueChanged(newValue); + } + + // Reads the last stored value and returns it with a prefix + function retrieve() public view returns (string memory) { + return string(abi.encodePacked("V2: ", uint2str(value))); + } + + // Converts a uint to a string + function uint2str(uint _i) internal pure returns (string memory) { + if (_i == 0) { + return "0"; + } + uint j = _i; + uint len; + while (j != 0) { + len++; + j /= 10; + } + bytes memory bstr = new bytes(len); + uint k = len; + while (_i != 0) { + k = k - 1; + uint8 temp = (48 + uint8(_i - (_i / 10) * 10)); + bytes1 b1 = bytes1(temp); + bstr[k] = b1; + _i /= 10; + } + return string(bstr); + } +} +``` + +## Upgrade transparent proxy + +To upgrade the implementation of the transparent upgradeable contract, use the `upgradeProxy` method from the `zkUpgrades`. + +```typescript + const BoxV2 = await deployer.loadArtifact('BoxV2'); + await hre.zkUpgrades.upgradeProxy(deployer.zkWallet, <PROXY_ADDRESS>, BoxV2); +``` + +`upgradeProxy` receives 3 arguments: + +- A zkSync Era wallet. +- The address of the previously deployed box proxy. +- The artifact containing the new `Box2` implementation. + +## Upgrade UUPS proxy + +Similar to the deployment script, there are no modifications needed to upgrade the implementation of the UUPS contract, +compared to upgrading the transparent upgradable contract. The only difference is that we use the `BoxUupsV2` as a new implementation contract. 
+ +```typescript + +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.16; +import '@openzeppelin/contracts-upgradeable/proxy/utils/Initializable.sol'; +import '@openzeppelin/contracts-upgradeable/proxy/utils/UUPSUpgradeable.sol'; +import '@openzeppelin/contracts-upgradeable/access/OwnableUpgradeable.sol'; + +contract BoxUupsV2 is Initializable, UUPSUpgradeable, OwnableUpgradeable { + uint256 private value; + uint256 private secondValue; + uint256 private thirdValue; + + function initialize(uint256 initValue) public initializer { + value = initValue; + } + + // Reads the last stored value and returns it with a prefix + function retrieve() public view returns (string memory) { + return string(abi.encodePacked('V2: ', uint2str(value))); + } + + // Converts a uint to a string + function uint2str(uint _i) internal pure returns (string memory) { + if (_i == 0) { + return '0'; + } + uint j = _i; + uint len; + while (j != 0) { + len++; + j /= 10; + } + bytes memory bstr = new bytes(len); + uint k = len; + while (_i != 0) { + k = k - 1; + uint8 temp = (48 + uint8(_i - (_i / 10) * 10)); + bytes1 b1 = bytes1(temp); + bstr[k] = b1; + _i /= 10; + } + return string(bstr); + } + + // Stores a new value in the contract + function store(uint256 newValue) public { + value = newValue; + emit ValueChanged(newValue); + } + + function _authorizeUpgrade(address) internal override onlyOwner {} + + // Emitted when the stored value changes + event ValueChanged(uint256 newValue); +} +``` + +Upgrade proxy script snippet: + +```typescript + const BoxUupsV2 = await deployer.loadArtifact('BoxUupsV2'); + await hre.zkUpgrades.upgradeProxy(deployer.zkWallet, <PROXY_ADDRESS>, BoxUupsV2); +``` + +## Upgrade beacon proxy + +Beacon proxy implementation can be upgraded using a similarly structured method from the `zkUpgrades` called `upgradeBeacon`. 
For example: + +```typescript + const boxV2Implementation = await deployer.loadArtifact('BoxV2'); + await hre.zkUpgrades.upgradeBeacon(deployer.zkWallet, <BEACON_PROXY_ADDRESS>, boxV2Implementation); +``` + +The example below deploys and upgrades a smart contract using a beacon proxy: + +```typescript +import { Deployer } from "@matterlabs/hardhat-zksync"; +import { Wallet } from "zksync-ethers"; +import * as zk from "zksync-ethers"; +import { Contract } from "ethers"; + +import * as hre from "hardhat"; + +async function main() { + // mnemonic for local node rich wallet + const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle"; + const zkWallet = Wallet.fromMnemonic(testMnemonic); + const deployer = new Deployer(hre, zkWallet); + + // deploy beacon proxy + const contractName = "Box"; + const contract = await deployer.loadArtifact(contractName); + const beacon = await hre.zkUpgrades.deployBeacon(deployer.zkWallet, contract); + await beacon.waitForDeployment(); + + const beaconAddress = await beacon.getAddress(); + + const boxBeaconProxy = await hre.zkUpgrades.deployBeaconProxy(deployer.zkWallet, beaconAddress, contract, [42]); + await boxBeaconProxy.waitForDeployment(); + + // upgrade beacon + const boxV2Implementation = await deployer.loadArtifact("BoxV2"); + await hre.zkUpgrades.upgradeBeacon(deployer.zkWallet, beaconAddress, boxV2Implementation); + console.info("Successfully upgraded beacon Box to BoxV2 on address: ", beaconAddress); + + const attachTo = new zk.ContractFactory<any[], Contract>(boxV2Implementation.abi, boxV2Implementation.bytecode, deployer.zkWallet, deployer.deploymentType); + const upgradedBox = attachTo.attach(await boxBeaconProxy.getAddress()); + + upgradedBox.connect(zkWallet); + // wait some time before the next call + await new Promise((resolve) => setTimeout(resolve, 2000)); + const value = await upgradedBox.retrieve(); + console.log("New box value is", value); +} + +main().catch((error) => { + 
console.error(error); + process.exitCode = 1; +}); +``` + +Run the script with: + +::code-group + +```bash [yarn] +yarn hardhat run SCRIPT_FILE +``` + +```bash [npm] +npx hardhat run SCRIPT_FILE +``` + +:: + +## Proxy verification + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +To use proxy verification functionality, you must use the `hardhat-zksync-verify` plugin version >=0.1.8 +:: + +The hardhat-zksync-upgradable plugin supports proxy verification, which means you can verify +all the contracts deployed during the proxy deployment with a single verify command. + +To use the verification functionality, you first need to +**import the `hardhat-zksync-verify plugin` before the `hardhat-zksync-upgradable` plugin in your `hardhat.config.ts` file:** + +```typescript +... +// Imports the verify plugin before the upgradable plugin +import '@matterlabs/hardhat-zksync-verify'; +import '@matterlabs/hardhat-zksync-upgradable'; +... +``` + +To verify all the deployed contracts, simply run the verify command with the <b>_proxy address_</b> as an argument: + +```sh +yarn hardhat verify <proxy address> +``` + +This command will verify the implementation related to the proxy, the proxy contract itself, +and all the smart contracts included in the specific deployment process, such as a proxy admin smart contract or a beacon smart contract. + +## Proxy validations + +The `hardhat-zksync-upgradable` plugin has built-in checks to ensure that your smart contract's newest implementation version +follows the necessary requirements when upgrading your smart contract. + +You can learn more about what those restrictions are in [OpenZeppelin's documentation](https://docs.openzeppelin.com/upgrades-plugins/1.x/writing-upgradeable). + +## Proxy gas fee estimation + +Should you wish to estimate the total gas used throughout the proxy deployment process, +consider utilizing the upgradable plugin's gas estimation functions. 
+
+We offer three types of gas estimation functions for your convenience:
+
+- estimateGasProxy
+- estimateGasBeacon
+- estimateGasBeaconProxy
+
+In the examples provided below, we will use a Box contract and the deployer in the same way we used them in the previous examples:
+
+```typescript
+// mnemonic for local node rich wallet
+const testMnemonic = "stuff slice staff easily soup parent arm payment cotton trade scatter struggle";
+const zkWallet = Wallet.fromMnemonic(testMnemonic);
+
+const deployer = new Deployer(hre, zkWallet);
+
+const contractName = "Box";
+const contract = await deployer.loadArtifact(contractName);
+```
+
+To estimate the deployment fee for the Transparent upgradable proxies and UUPS proxies,
+use the `estimateGasProxy` method from `zkUpgrades.estimation`.
+This method calculates the fee for deploying the implementation contract, transparent proxy/UUPS contract, and the ProxyAdmin smart contract.
+
+::code-group
+
+```bash [Transparent proxy]
+const totalGasEstimation = await hre.zkUpgrades.estimation.estimateGasProxy(deployer, contract, [], { kind: "transparent" });
+```
+
+```bash [UUPS proxy]
+const totalGasEstimation = await hre.zkUpgrades.estimation.estimateGasProxy(deployer, contract, [], { kind: "uups" });
+```
+
+::
+
+To estimate the deployment fee for the beacon contract and its corresponding implementation, use the `estimateGasBeacon` method:
+
+```typescript
+const totalGasEstimation = await hre.zkUpgrades.estimation.estimateGasBeacon(deployer, contract, []);
+```
+
+If you want to get the estimation for the beacon proxy contract, please use the `estimateGasBeaconProxy` method:
+
+```typescript
+const totalGasEstimation = await hre.zkUpgrades.estimation.estimateGasBeaconProxy(deployer, contract, []);
+```
+
+Each of these methods totals the fee for every contract in their respective pipeline,
+displays the cost on the console, and returns the cumulative sum.
+If you prefer not to see the individual estimations, +introduce the parameter `quiet` as the final parameter in any method to receive only the returned sum. + +```typescript +const totalGasEstimation = await hre.zkUpgrades.estimation.estimateGasProxy(this.deployer, contract, [], { kind: "uups" }, true); +``` + +## Commands + +Please consider that while the provided commands enable contract deployment and upgrading, not all arguments may be available. +If these commands lack the required functionality, it may be necessary to utilize scripting for a more comprehensive approach. + +## Configuration + +To extend the configuration to support commands, we need to add an accounts field +to the specific network configuration in the networks section of the `hardhat.config.ts` file. +This accounts field can support an array of private keys or a mnemonic object and represents accounts that will be used as wallet automatically. + +```typescript +const config: HardhatUserConfig = { + networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>", // The Ethereum Web3 RPC URL (optional). + }, + zkSyncSepoliaTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, + // ADDITION + // The private keys for the accounts used in the deployment or in the upgrade process. + accounts: ["0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3", "0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959"], + // Mnemonic used in the deployment or in the upgrade process + // accounts: { + // mnemonic: 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle' + // } + }, + }, +}; +``` + +- accounts represents a list of the private keys or mnemonic object for the account used in the deployment or in the upgrade process. 
+
+  The `accounts` object will automatically be populated with rich accounts if the network used is the zkSync Era Test Node or the zksync-cli Local Node.
+  To establish a default index per network, which is by default `0`, you can include a `deployerAccounts` section in your `hardhat.config.ts` file.
+
+```typescript
+const config: HardhatUserConfig = {
+  // ADDITION
+  deployerAccounts: {
+    zkTestnet: 1, // The default index of the account for the specified network.
+    //default: 0 // The default value for not specified networks. Automatically set by plugin to the index 0.
+  },
+  networks: {
+    sepolia: {
+      url: "https://sepolia.infura.io/v3/<API_KEY>", // The Ethereum Web3 RPC URL (optional).
+    },
+    zkSyncSepoliaTestnet: {
+      url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network.
+      ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`)
+      zksync: true,
+      // The private keys for the accounts used in the deployment process.
+      accounts: ["0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3", "0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959"],
+      // Mnemonic used in the deployment process
+      // accounts: {
+      //     mnemonic: 'stuff slice staff easily soup parent arm payment cotton trade scatter struggle'
+      // }
+    },
+  },
+};
+```
+
+- `deployerAccounts` represents an object where the default index of the accounts is provided
+and automatically used in the upgradable commands described below.
+If the network name is not specified inside the object, the default index of the account will be `0`.
+You can change the default index for networks that are not specified by overriding the `default` entry with the index you want.
+ +## Command list + +```sh +yarn hardhat deploy-zksync:proxy --contract-name <contract name or FQN> \ + [<constructor arguments>] \ + [--constructor-args <javascript module name>] \ + [--deployment-type <deployment type>] \ + [--initializer <initialize method>] \ + [--no-compile] +``` + +Automatically determine whether the deployment requires a Transparent or UUPS proxy, and deploy all necessary contracts accordingly. +If the Transparent proxy is chosen, the deployment will include the implementation, admin, and proxy. +Alternatively, selecting the UUPS proxy will result in deploying the implementation and proxy. + +```sh +yarn hardhat upgrade-zksync:proxy --contract-name <contract name or FQN> \ + --proxy-address <proxy address> \ + [--deployment-type <deployment type>] \ + [--no-compile] +``` + +Upgrade UUPS or Transparent implementation on the specified network. + +```sh +yarn hardhat deploy-zksync:beacon --contract-name <contract name or FQN> \ + [<constructor arguments>] \ + [--constructor-args <javascript module name>] \ + [--deployment-type <deployment type>] \ + [--initializer <initialize method>] \ + [--no-compile] +``` + +Initiates the deployment of the specified implementation, beacon, and proxy on the specified network. + +`yarn hardhat upgrade-zksync:beacon --contract-name <contract name or FQN> --beacon-address <beacon address> [--deployment-type <deployment type>] [--no-compile]` + +Upgrade beacon implementation on the specified network. + +- `--contract-name <contract name or FQN>` - contract name or FQN, required argument in all tasks, e.g. `hardhat deploy-zksync:proxy --contract-name SomeContract`. +- `<constructor arguments>` - list of constructor arguments, e.g. `hardhat deploy-zksync:proxy --contract-name Greeter 'Hello'`. +- `--constructor-args <module name>` - name of javascript module containing complex constructor arguments. +Works only if `<constructor arguments>` are not provided, +e.g. 
`hardhat deploy-zksync:contract --contract-name ComplexContract --constructor-args args.js`. +Example of `args.js` : + +```typescript +module.exports = [ + "a string argument", + "0xabcdef", + "42", + { + property1: "one", + property2: 2, + }, +]; +``` + +- `--beacon-address <beacon address>` - deployed beacon contract address, +e.g. `yarn hardhat upgrade-zksync:beacon --contract-name BoxV2 --beacon-address 0x4bbeEB066eD09B7AEd07bF39EEe0460DFa261520`. +- `--proxy-address <proxy address>` - deployed proxy contract address, e.g. `yarn hardhat upgrade-zksync:proxy --contract-name BoxV2 --proxy-address 0x4bbeEB066eD09B7AEd07bF39EEe0460DFa261520`. +- `--initializer <initializer method>` - initializer method name present in the contract, +e.g. `hardhat deploy-zksync:proxy --contract-name Contract --initializer store`. If this parameter is omitted, the default value will be `initialize`. +- `--no-compile`- skip the compilation process, e.g. `hardhat deploy-zksync:beacon --contract-name Contract --no-compile`. +- `--deployment-type` - specify which deployer smart contract function will be called. +Permissible values for this parameter include `create`, `create2`, `createAccount`, and `create2Account`. +If this parameter is omitted, the default value will be `create`, +e.g. `hardhat deploy-zksync:beacon --contract-name Greeter 'Hello' --deployment-type create2`. + +The account used for deployment will be the one specified by the `deployerAccount` configuration within the `hardhat.config.ts` file. +If no such configuration is present, the account with index `0` will be used. 
diff --git a/content/00.build/40.tooling/20.hardhat/80.hardhat-zksync-verify.md b/content/00.build/40.tooling/20.hardhat/80.hardhat-zksync-verify.md new file mode 100644 index 00000000..f3fedd8b --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/80.hardhat-zksync-verify.md @@ -0,0 +1,207 @@ +--- +title: hardhat-zksync-verify +description: +--- + +This plugin is used to verify contracts on the zkSync Era network. + +[Changelog](%%zk_git_repo_hardhat-zksync%%/blob/main/packages/hardhat-zksync-verify/CHANGELOG.md) + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Unknown zksolc version**: If you encounter this error, it suggests that the backend verification system does not currently support +the latest version of the zksolc compiler. +In such cases, it may require some time for the backend to be updated to accommodate the latest compiler version. + +As a temporary solution, please use previous versions of the compiler until the backend verification system is updated to support the latest version. +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Ensure you are using the correct version of the plugin with ethers: + +- For plugin version **<1.0.0**: + + - Compatible with ethers **v5**. + +- For plugin version **≥1.0.0**: + + - Compatible with ethers **v6** (⭐ Recommended) +:: + +## Setup + +The [@matterlabs/hardhat-zksync-verify](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-verify) plugin +is used in conjunction with [@nomicfoundation/hardhat-verify](https://www.npmjs.com/package/@nomicfoundation/hardhat-verify) +and it supports backward compatibility. +To use it, install both plugins and then import `@matterlabs/hardhat-zksync-verify` in the `hardhat.config.ts` file. 
+ +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-verify @nomicfoundation/hardhat-verify +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-verify +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-verify --dev +``` + +:: + +### Configuration + +Import the plugin in the `hardhat.config.ts` file: + +```javascript +import "@matterlabs/hardhat-zksync-verify"; +``` + +Add the `verifyURL` property to the zkSync Era network in the `hardhat.config.ts` file as shown below: + +```typescript +networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). + }, + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, + // Verification endpoint for Sepolia + verifyURL: '%%zk_testnet_block_explorer_url%%/contract_verification' + } +}, +// defaultNetwork: "zkTestnet", // optional (if not set, use '--network zkTestnet') +``` + +Additional network properties: + +- `zkTestnet` is an arbitrary zkSync Era network name. You can select this as the default network using the `defaultNetwork` property. +- `url` is a field with the URL of the zkSync Era node +in case of the zkSync Era network (with `zksync` flag set to `true`), or the URL of the Ethereum node. +This field is required for all zkSync Era and Ethereum networks used by this plugin. +- `ethNetwork` is a field with the URL of the Ethereum node. +You can also provide network name (e.g. `%%zk_testnet_identifier%%`) as the value of this field. +In this case, the plugin will either use the URL of the appropriate Ethereum network configuration (from the `networks` section), +or the default `ethers` provider for the network if the configuration is not provided. This field is required for all zkSync networks used by this plugin. 
+- `zksync` is a flag that indicates a zkSync Era network configuration. This field is set to `true` for all zkSync Era networks. +If you want to run a `hardhat-verify` verification, this field needs to be set to `false`. +If set to `true`, the verification process will try to run the verification process on the zkSync Era network. +- `verifyURL` is a field that points to the verification endpoint for the specific zkSync network. +This parameter is optional, and its default value is the testnet verification url. + - Testnet: `%%zk_testnet_block_explorer_url%%/contract_verification` + - Mainnet: `%%zk_mainnet_block_explorer_url%%/contract_verification` + +If you want to verify a smart contract on the Ethereum in the same project, +it is important to add `etherscan` field and API key in the `hardhat.config.ts` file: + +```typescript + +networks: { + ... +}, +etherscan: { + apiKey: //<Your API key for Etherscan>, +}, + +``` + +### Commands + +```sh +yarn hardhat verify --network <network> <contract address> +``` + +This command verifies the contract on the given network with the given contract's address. + +When executed in this manner, the verification task attempts to compare the compiled bytecode of all the contracts in your local environment +with the deployed bytecode of the contract you are seeking to verify. If there is no match, it reports an error. + +```sh +yarn hardhat verify --network <network> <contract address> --contract <fully qualified name> +``` + +With the `--contract` parameter you can also specify which contract from your local setup you want to verify by specifying its Fully qualified name. +Fully qualified name structure looks like this: "contracts/AContract.sol:TheContract" + +#### Constructor arguments + +If your contract was deployed with the specific constructor arguments, you need to specify them when running the verify task. For example: + +```sh +yarn hardhat verify --network testnet 0x7cf08341524AAF292255F3ecD435f8EE1a910AbF "Hi there!" 
+``` + +If your constructor takes a complex argument list, you can write a separate javascript module to export it. +For example, create an `arguments.js` file with the following structure: + +```typescript +module.exports = [ + "a string argument", + "0xabcdef", + "42", + { + property1: "one", + property2: 2, + }, +]; +``` + +Include it in the verify function call by adding a new parameter: `--constructor-args arguments.js`: + +```sh +yarn hardhat verify --network testnet 0x7cf08341524AAF292288F3ecD435f8EE1a910AbF --constructor-args arguments.js +``` + +The hardhat-zksync-verify plugin also supports the verification with encoded constructor parameters. + +In order to use the encoded parameters, you need to specify a separate javascript module and export them as a **_non-array_** parameter. +It is important for encoded arguments to start with `0x` in order to be recognized by the plugin. For example: + +```typescript +module.exports = "0x0x00087a676164696a61310000087a676164696a61310000000000000000000000008537b364a83f5c9a7ead381d3baf9cbb83769bf5"; +``` + +### Verification status check + +The verification process consists of two steps: + +- A verification request is sent to confirm if the given parameters for your contract are correct. +- Then, we check the verification status of that request. + Both steps run when you run the `verify` task, but you will be able to see your specific verification request ID. + You can then use this ID to check the status of your verification request without running the whole process from the beginning. 
+ +The following command checks the status of the verification request for the specific verification ID: + +```sh +yarn hardhat verify-status --verification-id <your verification id> +``` + +### Verify smart contract programmatically + +If you need to run the verification task directly from your code, you can use the hardhat `verify:verify` task +with the previously mentioned parameters with the difference in using `--address` parameter when specifying contract's address. + +```typescript +const verificationId = await hre.run("verify:verify", { + address: contractAddress, + contract: contractFullyQualifedName, + constructorArguments: [...] +}); +``` + +This task returns a verification id if the request was successfully sent. + +You can use this id to check the status of your verification request as described in the section above. + +If you are using encoded constructor args, `constructorArguments` parameter should be a non-array value starting with `0x`. + +```typescript +const verificationId = await hre.run("verify:verify", { + address: contractAddress, + contract: contractFullyQualifedName, + constructorArguments: "0x12345...", +}); +``` diff --git a/content/00.build/40.tooling/20.hardhat/90.hardhat-zksync-verify-vyper.md b/content/00.build/40.tooling/20.hardhat/90.hardhat-zksync-verify-vyper.md new file mode 100644 index 00000000..419ec7ab --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/90.hardhat-zksync-verify-vyper.md @@ -0,0 +1,204 @@ +--- +title: hardhat-zksync-verify-vyper +description: +--- + +This plugin is used to verify vyper contracts on the zkSync Era network. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Current version of the verify vyper plugin has a limitation where in order to verify the vyper contract, +verification request must be sent with exactly the same vyper smart contracts list in your project's "contracts" folder, +as it was during the deployment of that specific vyper contract. 
+ +This means that if you had both `VyperGreeterOne.vy` and `VyperGreeterTwo.vy` smart contracts in your project when you deployed them, +in order to verify each one of them separately, you will also need to have both of them in the project when sending verification request. +In any other situation, you will receive a message that contract's "bytecode doesn't match any of your local contracts". + +In order to minimize this risk, we **_strongly_** recommend you to verify your vyper smart contracts right after their deployment! +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Alpha release**: Because of verification context limitation, hardhat-zksync-verify-vyper plugin is still labeled as `alpha` +and we do NOT recommend using it in the production environment. +On the other hand, we are working on removing this limitation +and we want to encourage you to give us your feedback on the plugin's functionalities, usability or possible improvements. +Please start or engage in the discussion about it +in our [Community Hub](%%zk_git_repo_zksync-developers%%/discussions), +or open a Github issue in the [project's repository](%%zk_git_repo_hardhat-zksync%%/issues). +:: + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Ensure you are using the correct version of the plugin with ethers: + +- For plugin version **<1.0.0**: + + - Compatible with ethers **v5**. + +- For plugin version **≥1.0.0**: + + - Compatible with ethers **v6** (⭐ Recommended) +:: + +## Setup + +The [@matterlabs/hardhat-zksync-verify-vyper](https://www.npmjs.com/package/@matterlabs/hardhat-zksync-verify-vyper) plugin +is used to verify contracts on zkSync network. +To use it, install plugin and then import `@matterlabs/hardhat-zksync-verify-vyper` in the `hardhat.config.ts` file. 
+ +::code-group + +```bash [yarn] +yarn add -D @matterlabs/hardhat-zksync-verify-vyper +``` + +```bash [npm] +npm i -D @matterlabs/hardhat-zksync-verify-vyper +``` + +```bash [bun] +bun add @matterlabs/hardhat-zksync-verify-vyper --dev +``` + +:: + +### Configuration + +Import the plugin in the `hardhat.config.ts` file: + +```javascript +import "@matterlabs/hardhat-zksync-verify-vyper"; +``` + +Add the `verifyURL` property to the zkSync Era network in the `hardhat.config.ts` file as shown below: + +```typescript +networks: { + sepolia: { + url: "https://sepolia.infura.io/v3/<API_KEY>" // The Ethereum Web3 RPC URL (optional). + }, + zkTestnet: { + url: "%%zk_testnet_rpc_url%%", // The testnet RPC URL of zkSync Era network. + ethNetwork: "%%zk_testnet_identifier%%", // The Ethereum Web3 RPC URL, or the identifier of the network (e.g. `mainnet` or `sepolia`) + zksync: true, + // Verification endpoint for Sepolia + verifyURL: '%%zk_testnet_block_explorer_url%%/contract_verification' + } +}, +// defaultNetwork: "zkTestnet", // optional (if not set, use '--network zkTestnet') +``` + +Additional network properties: + +- `zkTestnet` is an arbitrary zkSync Era network name. You can select this as the default network using the `defaultNetwork` property. +- `url` is a field with the URL of the zkSync Era node. This field is required for all zkSync networks used by this plugin. +- `ethNetwork` is a field with the URL of the Ethereum node. You can also provide network name (e.g. `sepolia`) as the value of this field. +In this case, the plugin will either use the URL of the appropriate Ethereum network configuration (from the `networks` section), +or the default `ethers` provider for the network if the configuration is not provided. +This field is required for all zkSync networks used by this plugin. +- `zksync` is a flag that indicates a zkSync Era network configuration. +This field is set to `true` for all zkSync Era networks. 
Field value `true` is required for this plugin to work.
+If the field is missing, or if the value is set to `false`, the plugin will throw an error.
+- `verifyURL` is a field that points to the verification endpoint for the specific zkSync network.
+This parameter is optional, and its default value is the testnet verification url.
+  - Testnet: `%%zk_testnet_block_explorer_url%%/contract_verification`
+  - Mainnet: `%%zk_mainnet_block_explorer_url%%/contract_verification`
+
+### Commands
+
+```sh
+yarn hardhat verify:vyper --network <network> <contract address>
+```
+
+This command verifies the contract on the given network with the given contract's address.
+
+When executed in this manner, the verification task attempts to compare the compiled bytecode of all the contracts
+in your local environment with the deployed bytecode of the contract you are seeking to verify.
+If there is no match, it reports an error.
+
+```sh
+yarn hardhat verify:vyper --network <network> <contract address> --contract <fully qualified name>
+```
+
+With the `--contract` parameter you can also specify which contract from your local setup
+you want to verify by specifying its Fully qualified name.
+Fully qualified name structure looks like this: "contracts/Contract.vy:Contract"
+
+#### Constructor arguments
+
+If your contract was deployed with specific constructor arguments, you need to specify them when running the verify task. For example:
+
+```sh
+yarn hardhat verify:vyper --network testnet 0x7cf08341524AAF292255F3ecD435f8EE1a910AbF "Hi there!"
+```
+
+If your constructor takes a complex argument list, you can write a separate JavaScript module to export it.
+For example, create an `arguments.js` file with the following structure: + +```typescript +module.exports = [ + "a string argument", + "0xabcdef", + "42", + { + property1: "one", + property2: 2, + }, +]; +``` + +Include it in the verify function call by adding a new parameter: `--constructor-args arguments.js`: + +```sh +yarn hardhat verify:vyper --network testnet 0x7cf08341524AAF292288F3ecD435f8EE1a910AbF --constructor-args arguments.js +``` + +The hardhat-zksync-verify plugin also supports the verification with encoded constructor parameters. + +In order to use the encoded parameters, you need to specify a separate javascript module and export them as a **_non-array_** parameter. +It is important for encoded arguments to start with `0x` in order to be recognized by the plugin. For example: + +```typescript +module.exports = "0x0x00087a676164696a61310000087a676164696a61310000000000000000000000008537b364a83f5c9a7ead381d3baf9cbb83769bf5"; +``` + +### Verification status check + +The verification process consists of two steps: + +- A verification request is sent to confirm if the given parameters for your contract are correct. +- Then, we check the verification status of that request. + Both steps run when you run the `verify:vyper` task, but you will be able to see your specific verification request ID. + You can then use this ID to check the status of your verification request without running the whole process from the beginning. + +The following command checks the status of the verification request for the specific verification ID: + +```sh +yarn hardhat verify-status:vyper --verification-id <your verification id> +``` + +### Verify smart contract programmatically + +If you need to run the verification task directly from your code, +you can use the hardhat `verify:verify:vyper` task with the previously mentioned parameters. 
+ +```typescript +const verificationId = await hre.run("verify:verify:vyper", { + address: contractAddress, + contract: contractFullyQualifedName, + constructorArguments: [...] +}); +``` + +This task returns a verification id if the request was successfully sent. + +You can use this id to check the status of your verification request as described in the section above. + +If you are using encoded constructor args, `constructorArguments` parameter should be a non-array value starting with `0x`. + +```typescript +const verificationId = await hre.run("verify:verify:vyper", { + address: contractAddress, + contract: contractFullyQualifedName, + constructorArguments: "0x12345...", +}); +``` diff --git a/content/00.build/40.tooling/20.hardhat/_dir.yml b/content/00.build/40.tooling/20.hardhat/_dir.yml new file mode 100644 index 00000000..573dd881 --- /dev/null +++ b/content/00.build/40.tooling/20.hardhat/_dir.yml @@ -0,0 +1 @@ +title: Hardhat Plugins diff --git a/content/00.build/40.tooling/30.foundry/10.overview.md b/content/00.build/40.tooling/30.foundry/10.overview.md new file mode 100644 index 00000000..f6f9b4d1 --- /dev/null +++ b/content/00.build/40.tooling/30.foundry/10.overview.md @@ -0,0 +1,55 @@ +--- +title: Overview +description: Learn about foundry-zksync. +--- + +`foundry-zksync` is a specialized fork of [Foundry](https://github.com/foundry-rs/foundry), tailored for zkSync. + +It extends Foundry's capabilities for Ethereum app development to support zkSync, allowing for the compilation, +deployment, testing, and interaction with smart contracts on zkSync. +`foundry-zksync` introduces `--zksync` flag, or the use of `vm.zkVm(true)` to target the zkSync VM. + +### Status and Contribution + +`foundry-zksync` is currently in its **alpha stage**, indicating ongoing development and potential for future enhancements. +It is open-sourced, and contributions from the developer community are welcome. 
+For more details and contributions, visit the [GitHub repository](%%zk_git_repo_foundry-zksync%%). + +## Features and Limitations + +### Features + +`foundry-zksync` offers a set of features designed to work with zkSync, providing a comprehensive toolkit for smart contract deployment and interaction: + +- **Smart Contract Deployment**: Easily deploy smart contracts to zkSync mainnet, testnet, or a local test node. +- **Asset Bridging**: Bridge assets between L1 and L2, facilitating seamless transactions across layers. +- **Contract Interaction**: Call and send transactions to deployed contracts on %%zk_testnet_name%% or local test node. +- **Solidity Testing**: Write tests in Solidity for a familiar testing environment. +- **Fuzz Testing**: Benefit from fuzz testing, complete with shrinking of inputs and printing of counter-examples. +- **Remote RPC Forking**: Utilize remote RPC forking mode. +- **Flexible Debug Logging**: Choose your debugging style: + - DappTools-style: Utilize DsTest's emitted logs for debugging. + - Hardhat-style: Leverage the popular console.sol contract. +- **Configurable Compiler Options**: Tailor compiler settings to your needs, including LLVM optimization modes. + +### Limitations + +While `foundry-zksync` is **alpha stage**, there are some limitations to be aware of, but not limited to: + +- **Compile Time**: Some users may experience slow compiling. +- **Specific Foundry Features**: Currently features such as `--gas-report` or `--verify` may not work as intended. +We are actively working on providing support for these feature types. +- **Compiling Libraries**: Compiling non-inlinable libraries requires deployment and adding to configuration. + +For more information please refer to [official docs](/build/tooling/hardhat/compiling-libraries). 
+ + ```toml + # In foundry.toml + libraries = [ + "src/MyLibrary.sol:MyLibrary:0xfD88CeE74f7D78697775aBDAE53f9Da1559728E4" + ] + ``` + +::callout{icon="i-heroicons-information-circle" color="blue"} +We are actively working to resolve limitations listed. Please check back on future releases for updates. +:: diff --git a/content/00.build/40.tooling/30.foundry/20.getting-started.md b/content/00.build/40.tooling/30.foundry/20.getting-started.md new file mode 100644 index 00000000..2e0d7fd2 --- /dev/null +++ b/content/00.build/40.tooling/30.foundry/20.getting-started.md @@ -0,0 +1,354 @@ +--- +title: Getting Started +description: Learn how to setup and use Foundry with your zkSync project. +--- + +## Prerequisites + +The primary prerequisite for using `foundry-zksync` is the [Rust Compiler](https://www.rust-lang.org/tools/install). + +## Installation Guide + +To integrate `foundry-zksync` into your projects, you have the flexibility to install its components individually or +the entire suite at once. Follow the steps below to get started: + +**Step 1:** Clone the repository: + +```bash +git clone git@github.com:matter-labs/foundry-zksync.git +``` + +**Step 2:** Navigate to the project directory: + +```bash +cd foundry-zksync +``` + +**Step 3:** Run the Installer: Execute the script to install the foundry-zksync binaries forge and cast + +```bash +./install-foundry-zksync +``` + +Once the `forge` and `cast` binaries are installed, you can start using `foundry-zksync`. Source your preferred +profile or refresh your terminal window to activate the changes. You are now ready to begin working with `foundry-zksync`. 
+ +For component-specific installations from source: + +- **Forge**: To install, execute: + +```bash +cargo install --path ./crates/forge --profile local --force --locked +``` + +- **Cast**: To install, run: + +```bash +cargo install --path ./crates/cast --profile local --force --locked +``` + +For the entire suite: + +- Execute the following command for a comprehensive installation: + +```bash +cargo build --release +``` + +Choose the installation that best fits your development needs. + +## Configuration + +### Initial Setup + +After installation, initialize a new project with `forge init <project_name>`, which sets up the basic structure of a new Foundry project. + +### Project Configuration using `foundry.toml` + +Foundry is designed to be very configurable. +You can configure Foundry using a file called `foundry.toml` in the root of your project, or any other parent directory. + +Configuration can be arbitrarily namespaced by profiles. The default profile is named `default`. + +You can select another profile using the `FOUNDRY_PROFILE` environment variable. +You can also override parts of your configuration using `FOUNDRY_` or `DAPP_` prefixed environment variables, like `FOUNDRY_SRC`. + +`forge init` creates a basic, extendable `foundry.toml` file. + +To see your current configuration, run `forge config`. To see only basic options (as set with `forge init`), run `forge config --basic`. +This can be used to create a new `foundry.toml` file with `forge config --basic > foundry.toml`. + +By default `forge config` shows the currently selected foundry profile and its values. +It also accepts the same arguments as `forge build`. 
+An example `foundry.toml` for zkSync with zksolc configurations may look like: + +```toml +[profile.default] +src = 'src' +out = 'out' +libs = ['lib'] + +[profile.zksync] +src = 'src' +libs = ['lib'] +fallback_oz = true +is_system = false +mode = "3" +``` + +### Private key setup with Foundry keystore + +:display-partial{ path="_partials/_foundry-create-keystore" } + +## Basic Usage + +### Running Tests + +Use `forge test --zksync` to run tests written for your smart contracts. + +For an overview of how to write tests using `foundry-zksync` please refer to Foundry testing [here](/build/test-and-debug/foundry). + +## Deploying Smart Contracts with `forge` + +### Compilation with `forge build --zksync` + +`forge build --zksync` is used for compiling smart contracts into %%zk_zkevm_label%% bytecode. +The compiled files are stored in a structured directory at `<PROJECT-ROOT>/zkout/`. + +**Usage:** + +```sh +forge build [OPTIONS] --zksync +``` + +**Key Compiler Options:** + +- `--use-zksolc <ZK_SOLC_VERSION>`: Specify the zksolc version or a local zksolc path. +- `--is-system <SYSTEM_MODE>`: Enables system contract compilation mode (`true`/`false`). +- `--force-evmla <FORCE_EVMLA>`: Switch to the EVM legacy assembly pipeline. +- `--fallback-oz <FALLBACK_OZ>`: Recompile with `-Oz` if bytecode is too large. +- `--detect-missing-libraries`: Detect and report missing libraries. +- `-O, --optimization <LEVEL>`: Set LLVM optimization levels. +- `--zk-optimizer`: Optimize specifically for zkSync. + +**Example Usage:** +Compile with default settings or specify `zksolc` version: + +```sh +forge build --zksync +``` + +### Deployment with `forge create --zksync` + +::callout{icon="i-heroicons-information-circle" color="blue"} +The following commands make use of Foundry keystore instead of private keys. +[Learn how to create a keystore](#private-key-setup-with-foundry-keystore). +:: + +`forge create --zksync` deploys smart contracts to zkSync. 
+ +**Usage:** + +```sh +forge create <CONTRACT> [OPTIONS] --rpc-url <RPC-URL> --chain <CHAIN-ID> --account myKeystore --sender <KEYSTORE_ADDRESS> --zksync +``` + +**Options:** + +- `--constructor-args <ARGS>`: Specify constructor arguments. +- `--constructor-args-path <FILE>`: File path for constructor arguments. +- `<CONTRACT>`: Contract identifier in `<path>:<contractname>` format. +- `--factory-deps <FACTORY-DEPS>`: Specify factory dependencies. + +**Example:** +Deploy `Greeter.sol` to %%zk_testnet_name%%: + +<details> +<summary>Click to view the `Greeter.sol` contract</summary> + +```solidity +//SPDX-License-Identifier: Unlicense +pragma solidity ^0.8.0; + +contract Greeter { + string private greeting; + + constructor(string memory _greeting) { + greeting = _greeting; + } + + function greet() public view returns (string memory) { + return greeting; + } + + function setGreeting(string memory _greeting) public { + greeting = _greeting; + } +} +``` + +</details> + +```bash +forge create src/Greeter.sol:Greeter --constructor-args "Hello zkSync" --account myKeystore --sender <KEYSTORE_ADDRESS> --rpc-url %%zk_testnet_rpc_url%% --chain %%zk_testnet_chain_id%% --zksync +``` + +### Deploying Factory Contracts + +To deploy contracts like `GreeterFactory.sol`, use the `is-system` flag. 
+ +<details> +<summary>Click to view the `GreeterFactory.sol` contract</summary> + +```solidity +// SPDX-License-Identifier: Unlicense +pragma solidity ^0.8.0; + +import "./Greeter.sol"; + +contract Factory { + Greeter[] public GreeterArray; + + function CreateNewGreeter(string memory _greeting) public { + Greeter greeter = new Greeter(_greeting); + GreeterArray.push(greeter); + } + + function gfSetter(uint256 _greeterIndex, string memory _greeting) public { + Greeter(address(GreeterArray[_greeterIndex])).setGreeting(_greeting); + } + + function gfGetter(uint256 _greeterIndex) public view returns (string memory) { + return Greeter(address(GreeterArray[_greeterIndex])).greet(); + } +} +``` + +</details> + +**Compile `GreeterFactory.sol`:** + +```bash +forge build --is-system=true --zksync +``` + +**Deploy `GreeterFactory.sol`:** + +```sh +forge create src/GreeterFactory.sol:Factory --factory-deps src/Greeter.sol:Greeter --account myKeystore --sender <KEYSTORE_ADDRESS> --rpc-url %%zk_testnet_rpc_url%% --chain %%zk_testnet_chain_id%% --zksync +``` + +**Deploy `Greeter.sol` via `GreeterFactory.sol`:** + +```sh +cast send <FACTORY_ADDRESS> "CreateNewGreeter(string)" "zkSync Rules" --account myKeystore --sender <KEYSTORE_ADDRESS> --rpc-url %%zk_testnet_rpc_url%% --chain %%zk_testnet_chain_id%% +``` + +**Interact with `Greeter.sol`** + +```sh +cast call <CONTRACT_ADDRESS> "greet()(string)" --rpc-url %%zk_testnet_rpc_url%% --chain %%zk_testnet_chain_id%% +``` + +**Output:** + +```sh +0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c7a6b53796e632052756c65730000000000000000000000000000000000000000 +``` + +**To decode the output to a readable string:** + +```sh +cast to-ascii 0x0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000c7a6b53796e632052756c65730000000000000000000000000000000000000000 +``` + +**Output:** + +```sh 
+zkSync Rules +``` + +## Basic zkSync Chain Interactions with `cast` + +### Introduction + +This guide introduces you to fundamental interactions within the zkSync chain using `cast`, a component of the `foundry-zksync` toolkit. +Learn how to query chain IDs, retrieve client versions, check L2 ETH balances, obtain gas prices, and more. + +### Chain ID Retrieval + +- **Local Node:** + + Retrieve the Chain ID for a local zkSync node with: + + ```sh + cast chain-id --rpc-url http://localhost:3050 + ``` + + Expected Output: `270`, indicating the Chain ID of your local zkSync node. + +- **%%zk_testnet_name%%:** + + For the %%zk_testnet_name%%, use: + + ```sh + cast chain-id --rpc-url %%zk_testnet_rpc_url%% + ``` + + Expected Output: `%%zk_testnet_chain_id%%`, the Chain ID for the %%zk_testnet_name%%. + +### Client Version Information + +Knowing the client version is vital for compatibility checks and debugging: + +```sh +cast client --rpc-url %%zk_testnet_rpc_url%% +``` + +Expected Output: `zkSync/v2.0`, denoting the client version. + +### L2 Balance Check + +Verify the Layer 2 (L2) balance of an account: + +```sh +cast balance 0x8b1d48a69ACEbC6eb201e2F4d162A002203Bfe8E --rpc-url %%zk_testnet_rpc_url%% +``` + +Expected Output: A numerical value, e.g., `774909739323110932`, representing the account's L2 balance. + +### Current Gas Price + +Fetch the current gas price on the network for transaction cost estimations: + +```sh +cast gas-price --rpc-url %%zk_testnet_rpc_url%% +``` + +Expected Output: A value such as `100000000`, indicating the current gas price. + +### Latest Block Details + +Gain insights into the latest block on the zkSync chain: + +```sh +cast block latest --rpc-url %%zk_testnet_rpc_url%% +``` + +Expected Output: Detailed information about the latest block, including base fee per gas, gas limit, block hash, and more. 
+ +### Sending Transactions + +Initiate transactions, such as contract function calls, using `cast`: + +```sh +cast send <CONTRACT_ADDRESS> <FUNCTION_SIGNATURE> <ARGUMENTS> --rpc-url <RPC-URL> --account myKeystore --sender <KEYSTORE_ADDRESS> --chain <CHAIN-ID> +``` + +Example: + +```sh +cast send 0xe34E488C1B0Fb372Cc4a5d39219261A5a6fc7996 "setGreeting(string)" "Hello, zkSync!" --rpc-url %%zk_testnet_rpc_url%% --account myKeystore --sender <KEYSTORE_ADDRESS> --chain %%zk_testnet_chain_id%% +``` + +This command calls the `setGreeting` function of a contract, updating the greeting to "Hello, zkSync!". diff --git a/content/00.build/40.tooling/_dir.yml b/content/00.build/40.tooling/_dir.yml new file mode 100644 index 00000000..a750e3b2 --- /dev/null +++ b/content/00.build/40.tooling/_dir.yml @@ -0,0 +1 @@ +title: Tooling diff --git a/content/00.build/60.test-and-debug/00.index.md b/content/00.build/60.test-and-debug/00.index.md new file mode 100644 index 00000000..87f541fa --- /dev/null +++ b/content/00.build/60.test-and-debug/00.index.md @@ -0,0 +1,60 @@ +--- +title: Getting Started +description: Learn about the recommended paths of testing and debugging your projects on zkSync. +--- + +zkSync Era provides two distinct testing environments for your local development needs: + +- Dockerized local setup +- In-Memory Node. + +Each solution boasts unique characteristics tailored to diverse use cases. +This section aims to unpack the intricacies of these tools, aiding you in selecting the setup best suited for your development workflow. + +--- +## In-Memory node vs Dockerized local setup + +The local testing process revolves around two principal options: + +1. **Dockerized local setup**: An extensive zkSync Era network simulation that comprises a Postgres database, +a local Geth node functioning as Layer 1, and the zkSync node. +Opt for this setup for comprehensive simulations and testing that require interaction with both L1 and L2. + +2. 
**In-Memory node**: A lightweight, speedy alternative, the in-memory node, supports forking the state from various networks, +including the mainnet and testnet. This choice is ideal for swift testing, prototyping, and bootloader and system contract testing. + +### When to use each + +- Use the **Dockerized local setup** for in-depth simulations and tests that necessitate L1 and L2 interaction. +This detailed setup mirrors how your contracts will function within the mainnet zkSync Era network. + +- Opt for the **In-Memory node** for swift testing, prototyping, or testing new changes via the local bootloader and system contracts. +This setup facilitates forking the state from the mainnet or testnet, suitable for replaying transactions +or observing the impact of modifications on existing contracts. + +### Feature comparison + +The following table highlights the key characteristics of each testing environment for a quick comparison: + +| Feature | In-memory node | Dockerized local setup | +| --------------------------------------- | ------------------- | ---------------------- | +| Quick startup | Yes | No | +| Supports forking state | Yes | No | +| Console.log debugging | Yes | No | +| Detailed call traces | Yes | No | +| Pre-configured 'rich' accounts | Yes | Yes | +| Replay existing transactions | Yes | No | +| Fast for integration testing | Yes | No | +| Communication between Layer 1 & Layer 2 | No | Yes | +| Multiple transactions per batch | No | Yes | +| Complete set of APIs | No (Basic set only) | Yes | +| Websocket support | No | Yes | + +Whether you're testing new contracts, debugging transactions, or prototyping, zkSync Era provides robust options for local testing. +Both the Dockerized local setup and the In-Memory Node offer feature-rich and quick setup options, each with their distinct strengths and limitations. +Choose the most appropriate setup based on your specific needs, and happy testing! 
+ +## Use zkSync CLI for easy setup + +The [zkSync CLI](/build/tooling/zksync-cli) makes it simple for developers to work with both the Dockerized local setup and In-Memory Node. +Use `zksync-cli dev start` to get your local development environment running along with additional modules like Block Explorer, Wallet and Bridge. diff --git a/content/00.build/60.test-and-debug/10.dockerized-l1-l2-nodes.md b/content/00.build/60.test-and-debug/10.dockerized-l1-l2-nodes.md new file mode 100644 index 00000000..0d7d5539 --- /dev/null +++ b/content/00.build/60.test-and-debug/10.dockerized-l1-l2-nodes.md @@ -0,0 +1,109 @@ +--- +title: Docker L1 - L2 Nodes +description: Guide to setup dockerized containers of L1 and L2 nodes. +--- + +Welcome to this step-by-step guide on establishing a local testing environment using Docker for zkSync development. +With this guide, you can effortlessly emulate the zkSync environment on your local system, making it simpler to test and develop features. +Let's get started! + +**Prerequisites**: + +1. **Docker and docker-compose**: Ensure that Docker and `docker-compose` are installed on your machine. +If you haven't already installed them, follow the [installation guide](https://docs.docker.com/get-docker/). +2. **zkSync Hardhat plugins**: A foundational understanding of the zkSync Hardhat plugins will be beneficial. +New to zkSync development with Hardhat? Explore the [Getting Started section](/build/tooling/hardhat/getting-started). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The `matterlabs/local-node` Docker image is currently based on the protocol `Version19` (deprecated on February 5, 2024), +and it will be upgraded in the coming months. +It should only be used for testing L1 <-> L2 communication. +:: + +--- + +## Set up the testing environment + +1. Clone the dockerized zkSync project repository to your local machine: + + ```bash + git clone %%zk_git_repo_local-setup%% + ``` + +1. 
To start the local node, navigate to the cloned directory: + + ```bash + cd local-setup + ``` + +1. Launch the zkSync Era node locally using the `start.sh` script: + + ```bash + ./start.sh + ``` + + This script spins up three essential docker containers: + + 1. **Postgres**: The database supporting zkSync. + 2. **Local Geth node**: Acts as the Layer 1 (L1) for zkSync. + 3. **zkSync node**: The core component. + +::callout{icon="i-heroicons-light-bulb" color="blue"} +The first execution of the `start.sh` script should proceed without interruptions. +If it halts unexpectedly, you might need to reset the local zkSync state and retry. +The initialization might take up to 10 minutes initially. +:: + +### Network Details + +- **HTTP JSON-RPC API**: Accessible via port 3050. +- **WebSocket (WS) API**: Accessible through port 3051. + + Default endpoints: + +- **L1 RPC**: <http://localhost:8545> +- **L2 RPC**: <http://localhost:3050> +- **WS API**: <http://localhost:3051> + + **Network Id**: 270 + +--- +## Reset the zkSync State + +If you need to revert the zkSync state to its initial configuration, execute the `clear.sh` script: + +```bash +./clear.sh +``` + +In the event of a "permission denied" error, run the following script with root access: + +```bash +sudo ./clear.sh +``` + +--- +## Leverage rich wallets + +The local zkSync setup generously equips test wallets with ample amounts of ETH on both L1 and L2, making testing easier. 
+
+::drop-panel
+  ::panel{label="Rich Wallets"}
+    :display-partial{path="/_partials/_rich-wallets"}
+  ::
+::
+
+---
+## Custom configurations (advanced)
+
+To operate with a custom Postgres database or a distinct Layer 1 node,
+you'll need to adjust environment variables within the `docker-compose` file:
+
+```yaml
+environment:
+  - DATABASE_URL=postgres://postgres@postgres/zksync_local
+  - ETH_CLIENT_WEB3_URL=http://geth:8545
+```
+
+`DATABASE_URL` is the connection URL to the Postgres database,
+and `ETH_CLIENT_WEB3_URL` is the endpoint URL for the HTTP JSON-RPC interface of the L1 node.
diff --git a/content/00.build/60.test-and-debug/20.in-memory-node.md b/content/00.build/60.test-and-debug/20.in-memory-node.md
new file mode 100644
index 00000000..3cccc710
--- /dev/null
+++ b/content/00.build/60.test-and-debug/20.in-memory-node.md
@@ -0,0 +1,515 @@
+---
+title: In-Memory Node
+description: Learn how to set up a local in-memory era_test_node.
+---
+
+This section provides instructions on setting up and using the In-Memory Node, `era_test_node`, for local testing.
+It covers installation, network forking, transaction details viewing, replaying transactions, and testing local bootloader and system contracts.
+
+::callout{icon="i-heroicons-information-circle-16-solid" color="amber"}
+Please keep in mind that `era-test-node` is still in its **alpha** stage,
+some features might not be fully supported yet and may not work as fully intended.
+It is [open-sourced](%%zk_git_repo_era-test-node%%) and contributions are welcomed.
+::
+
+## Understand the In-Memory Node
+
+The In-Memory Node uses an in-memory database for storing state information and simplified hashmaps for tracking blocks and transactions.
+In fork mode, it retrieves missing storage data from a remote source when not available locally.
+Moreover, it also uses the remote server (openchain) to resolve the ABI and topics to human-readable names.
+
+You can visit the `era-test-node` repository [to learn more](%%zk_git_repo_era-test-node%%).
+
+## Run actions with `zksync-cli`
+
+You can set up the In-Memory Node quickly with `zksync-cli dev start`.
+If you don't have `zksync-cli` set up, see the [Overview](/build/tooling/zksync-cli) guide.
+
+Note: at the moment this method won't allow you to use additional features like forking networks or replaying transactions.
+
+## Install and set up `era_test_node`
+
+1. Download `era_test_node` from the latest [Release](%%zk_git_repo_era-test-node%%/releases/latest).
+
+1. Extract the binary and mark as executable:
+
+   ```bash
+   tar xz -f /path/to/downloaded/binary/era_test_node.tar.gz -C /usr/local/bin/
+   chmod +x /usr/local/bin/era_test_node
+   ```
+
+1. Start the node:
+
+   ```bash
+   era_test_node run
+   ```
+
+The expected output will be as follows:
+
+```sh
+12:34:56 [INFO] Starting network with chain id: L2ChainId(260)
+12:34:56 [INFO] Rich Accounts
+12:34:56 [INFO] =============
+12:34:56 [INFO] Account #0: 0x36615Cf349d7F6344891B1e7CA7C72883F5dc049 (1_000_000_000_000 ETH)
+12:34:56 [INFO] Private Key: 0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110
+12:34:56 [INFO]
+12:34:56 [INFO] Account #1: 0xa61464658AfeAf65CccaaFD3a512b69A83B77618 (1_000_000_000_000 ETH)
+12:34:56 [INFO] Private Key: 0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3
+
+...
+ +12:34:56 [INFO] Account #9: 0xE90E12261CCb0F3F7976Ae611A29e84a6A85f424 (1_000_000_000_000 ETH) +12:34:56 [INFO] Private Key: 0x3eb15da85647edd9a1159a4a13b9e7c56877c4eb33f614546d4db06a51868b1c +12:34:56 [INFO] +12:34:56 [INFO] ======================================== +12:34:56 [INFO] Node is ready at 127.0.0.1:8011 +12:34:56 [INFO] ======================================== +``` + +::callout{icon="i-heroicons-information-circle-16-solid" color="amber"} +When utilizing `era_test_node` with MetaMask, it's essential to note that any restart of the in-memory node +will necessitate a reset of MetaMask's cached account data (nonce, etc). +In the MetaMask app, navigate to 'Settings', then 'Advanced', and finally, select 'Clear activity tab data'. +:: + +### Network details + +The `era_test_node` has the following default network configurations: + +- **L2 RPC:** `http://localhost:8011` +- **Network Id:** 260 + +These can be configured to your preference. + +::callout{icon="i-heroicons-information-circle-16-solid" color="amber"} +Please note that the existing implementation does not facilitate communication with Layer 1. +As a result, an L1 RPC is not available. 
+:: + +--- + +## Fork a network + +To fork the mainnet, use the following command, replacing `[network]` with either `mainnet` or `sepolia-testnet`: + +```bash +era_test_node fork [network] +``` + +::drop-panel + ::panel{label="Expected output"} + + ```sh + 14:50:12 INFO Creating fork from "https://mainnet.era.zksync.io:443" L1 block: L1BatchNumber(356201) L2 block: 21979120 with timestamp 1703083811, L1 gas price 41757081846 and protocol version: Some(Version18) + 14:50:12 INFO Starting network with chain id: L2ChainId(260) + 14:50:12 INFO + 14:50:12 INFO Rich Accounts + 14:50:12 INFO ============= + 14:50:16 INFO Account #0: 0xBC989fDe9e54cAd2aB4392Af6dF60f04873A033A (1_000_000_000_000 ETH) + 14:50:16 INFO Private Key: 0x3d3cbc973389cb26f657686445bcc75662b415b656078503592ac8c1abb8810e + 14:50:16 INFO Mnemonic: mass wild lava ripple clog cabbage witness shell unable tribe rubber enter + 14:50:16 INFO + 14:50:16 INFO Account #1: 0x55bE1B079b53962746B2e86d12f158a41DF294A6 (1_000_000_000_000 ETH) + 14:50:16 INFO Private Key: 0x509ca2e9e6acf0ba086477910950125e698d4ea70fa6f63e000c5a22bda9361c + 14:50:16 INFO Mnemonic: crumble clutch mammal lecture lazy broken nominee visit gentle gather gym erupt + + ... + + 14:50:19 INFO Account #9: 0xe2b8Cb53a43a56d4d2AB6131C81Bd76B86D3AFe5 (1_000_000_000_000 ETH) + 14:50:19 INFO Private Key: 0xb0680d66303a0163a19294f1ef8c95cd69a9d7902a4aca99c05f3e134e68a11a + 14:50:19 INFO Mnemonic: increase pulp sing wood guilt cement satoshi tiny forum nuclear sudden thank + 14:50:19 INFO + 14:50:19 INFO ======================================== + 14:50:19 INFO Node is ready at 127.0.0.1:8011 + 14:50:19 INFO ======================================== + ``` + + :: +:: + +This command starts the node, forked at the current head of the selected network. 
+ +You also have the option to specify a custom http endpoint and a custom forking height: + +```bash +era_test_node fork --fork-at 7000000 mainnet http://172.17.0.3:3060 +``` + +--- +## Replay remote transactions locally + +If you wish to replay a remote transaction locally for deep debugging, use the following command: + +```bash +era_test_node replay_tx sepolia-testnet 0x7119045573862797257e4441ff48bf5a3bc4d133a00d167c18dc955eda12cfac +``` + +For more detailed transaction information, such as call traces, add the `--show-calls` flag. +If you want to see ABI names, add the `--resolve-hashes` flag: + +```bash +era_test_node --show-calls=user \ +--resolve-hashes replay_tx sepolia-testnet \ +0x7119045573862797257e4441ff48bf5a3bc4d133a00d167c18dc955eda12cfac +``` + +Alternatively (if your node is already running) you can use `config_setShowCalls` and `config_setResolveHashes` RPC endpoints +to configure these values: + +```bash +# era_test_node already running... + +# Set show-calls to User +curl --request POST \ + --url http://localhost:8011/ \ + --header 'content-type: application/json' \ + --data '{"jsonrpc": "2.0","id": "1","method": "config_setShowCalls","params": ["user"]}' + +# Enable resolve-hashes +curl --request POST \ + --url http://localhost:8011/ \ + --header 'content-type: application/json' \ + --data '{"jsonrpc": "2.0","id": "1","method": "config_setResolveHashes","params": [true]}' +``` + +Here's an example of what you should expect to see when `show-calls` and `resolve-hashes` are configured: + +::drop-panel + ::panel{label="Expected output"} + + ```sh + Creating fork from "%%zk_testnet_rpc_url%%:443" L1 block: L1BatchNumber(4513) L2 block: 14945 with timestamp 1703064786, L1 gas price 61083275326 and protocol version: Some(Version19) + Starting network with chain id: L2ChainId(%%zk_testnet_chain_id%%) + Running 1 transactions (one per batch) + + Validating 0x7119045573862797257e4441ff48bf5a3bc4d133a00d167c18dc955eda12cfac + Executing 
0x7119045573862797257e4441ff48bf5a3bc4d133a00d167c18dc955eda12cfac + ┌─────────────────────────┐ + │ TRANSACTION SUMMARY │ + └─────────────────────────┘ + Transaction: SUCCESS + Initiator: 0x4eaf936c172b5e5511959167e8ab4f7031113ca3 + Payer: 0x4eaf936c172b5e5511959167e8ab4f7031113ca3 + Gas - Limit: 2_487_330 | Used: 969_330 | Refunded: 1_518_000 + Use --show-gas-details flag or call config_setShowGasDetails to display more info + + ==== Console logs: + + ==== 22 call traces. Use --show-calls flag or call config_setShowCalls to display more info. + Call(Normal) 0x4eaf936c172b5e5511959167e8ab4f7031113ca3 validateTransaction(bytes32, bytes32, tuple) 1830339 + Call(Normal) 0x0000000000000000000000000000000000000001 0x89c19e9b 1766835 + Call(Normal) 0x4eaf936c172b5e5511959167e8ab4f7031113ca3 payForTransaction(bytes32, bytes32, tuple) 1789767 + Call(Normal) 0x4eaf936c172b5e5511959167e8ab4f7031113ca3 executeTransaction(bytes32, bytes32, tuple) 1671012 + Call(Mimic) 0x5d4fb5385ed95b65d1cd6a10ed9549613481ab2f 0x 1443393 + + ==== 4 events + EthToken System Contract + Topics: + Transfer(address,address,uint256) + 0x0000000000000000000000004eaf936c172b5e5511959167e8ab4f7031113ca3 + 0x0000000000000000000000000000000000000000000000000000000000008001 + Data (Hex): 0x000000000000000000000000000000000000000000000000000c31dac51a6200 + + EthToken System Contract + Topics: + Transfer(address,address,uint256) + 0x0000000000000000000000000000000000000000000000000000000000008001 + 0x0000000000000000000000004eaf936c172b5e5511959167e8ab4f7031113ca3 + Data (Hex): 0x0000000000000000000000000000000000000000000000000009fc4d1bd4ad00 + + EthToken System Contract + Topics: + Transfer(address,address,uint256) + 0x0000000000000000000000004eaf936c172b5e5511959167e8ab4f7031113ca3 + 0x0000000000000000000000005d4fb5385ed95b65d1cd6a10ed9549613481ab2f + Data (Hex): 0x0000000000000000000000000000000000000000000000000090f705956a4008 + + EthToken System Contract + Topics: + Transfer(address,address,uint256) 
+ 0x0000000000000000000000000000000000000000000000000000000000008001 + 0x0000000000000000000000004eaf936c172b5e5511959167e8ab4f7031113ca3 + Data (Hex): 0x000000000000000000000000000000000000000000000000000159273ab13800 + ``` + + :: +:: + +--- +## Send network calls + +You can send network calls against a running `era_test_node`. + +Launch the local in-memory node: + + ```bash + era_test_node fork sepolia-testnet + ``` + +- Use curl to send a network call: + + ::code-group + ```bash [curl] + curl --request POST \ + --url http://localhost:8011 \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_call", + "params": [ + { + "to":"0xe1134444211593Cfda9fc9eCc7B43208615556E2", + "data":"0x313ce567" + }, + "latest" + ] + }' + ``` + + ```bash [expected output] + { + "jsonrpc":"2.0", + "result":"0x0000000000000000000000000000000000000000000000000000000000000012", + "id":1 + } + ``` + :: + +- Use [foundry-zksync](%%zk_git_repo_foundry-zksync%%). 
+
+  Make sure to install and configure `foundry-zksync` before proceeding
+  (for installation instructions, please see [Foundry with zkSync Era](%%zk_git_repo_foundry-zksync%%?tab=readme-ov-file#-installation)):
+
+  ::code-group
+
+  ```bash [foundry-zksync]
+  cast call 0xe1134444211593Cfda9fc9eCc7B43208615556E2 \
+    "name()(string)" \
+    --rpc-url http://localhost:8011
+  ```
+
+  ```bash [expected output]
+  Uniswap
+  ```
+
+  ::
+
+  Retrieve the balance of a particular contract:
+
+  ::code-group
+
+  ```bash [foundry-zksync]
+  cast call 0x40609141Db628BeEE3BfAB8034Fc2D8278D0Cc78 \
+    "balanceOf(address)(uint256)" \
+    0x40609141Db628BeEE3BfAB8034Fc2D8278D0Cc78 \
+    --rpc-url http://localhost:8011
+  ```
+
+  ```bash [expected output]
+  28762283719941475444443116625665
+  ```
+
+  ::
+
+---
+
+## Deploy contracts
+
+For the deployment of your contracts, you have the flexibility to choose between two preferred methods:
+either by using Hardhat with the `@matterlabs/hardhat-zksync` plugin, or via [`foundry-zksync`](https://github.com/matter-labs/foundry-zksync).
+
+The following example will detail the process using `foundry-zksync`.
+
+Before proceeding, ensure that you've compiled your contracts using `forge build --zksync`.
+
+```bash [foundry-zksync]
+forge create contracts/Greeter.sol:Greeter \
+  --constructor-args "zkSync and Foundry" \
+  --private-key 7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110 \
+  --rpc-url http://localhost:8011 \
+  --chain 260 \
+  --zksync
+```
+
+---
+## Test bootloader and system contracts
+
+In-memory node allows testing of the currently compiled bootloader and system contracts.
+This makes it possible to examine the effects of changes on already deployed contracts.
+
+::callout{icon="i-heroicons-information-circle-16-solid" color="amber"}
+These commands assume you have set `$ZKSYNC_HOME` in your shell profile file
+(e.g. ~/.bash_profile, ~/.zshrc) to target your local copy of `era_test_node`.
+For instance, + +```bash +export ZKSYNC_HOME=/path/to/era_test_node + +export PATH=$ZKSYNC_HOME/bin:$PATH +``` + +:: + +1. Preprocess and compile the contracts: + + ```bash + cd etc/system-contracts + yarn preprocess && yarn hardhat run ./scripts/compile-yul.ts + ``` + +1. To use the locally compiled bootloader and system contracts, run: + + ```bash + RUST_LOG=vm=trace era_test_node --dev-use-local-contracts fork sepolia-testnet + ``` + +--- + +## Pre-configured rich wallets + +In-Memory node includes pre-configured "rich" accounts for testing: + +:display-partial{path="_partials/_rich-wallets"} + +--- + +## Writing and running tests locally + +This section demonstrates how to author and execute tests locally against `era_test_node` using the `mocha` and `chai` testing frameworks with Hardhat. + +### Project configuration + +1. Start by creating a new Hardhat project. If you need guidance, follow the [getting started guide](/build/tooling/hardhat/getting-started). + +1. To incorporate the test libraries, execute: + + ::code-group + + ```bash [yarn] + yarn add -D mocha chai @types/mocha @types/chai + ``` + + ```bash [npm] + npm i mocha chai @types/mocha @types/chai --save-dev + ``` + + ```bash [bun] + bun add mocha chai @types/mocha @types/chai --dev + ``` + + :: + +1. Add the following lines to your `package.json` in the root folder: + + ```json [package.json] + "scripts": { + "test": "NODE_ENV=test hardhat test" + } + ``` + +This script makes it possible to run tests in a Hardhat environment with the `NODE_ENV` env variable set as `test`. + +### Configure tests + +Adjust `hardhat.config.ts` to use the local node for testing: + +::callout{icon="i-heroicons-information-circle-16-solid" color="amber"} +Ensure `era_test_node` is running in another process before executing the test command. 
+:: + +```typescript [hardhat.config.ts] +import "@matterlabs/hardhat-zksync"; + +module.exports = { + zksolc: { + version: "latest", + settings: {}, + }, + defaultNetwork: "zkSyncTestnet", + networks: { + hardhat: { + zksync: true, + }, + zkSyncTestnet: { + url: "http://localhost:8011", + ethNetwork: "http://localhost:8545", + zksync: true, + }, + }, + solidity: { + version: "0.8.17", + }, +}; +``` + +### Write test scripts + +Construct a `test/main.test.ts` file with the following code: + +```ts [test/main.test.ts] +import { expect } from "chai"; +import { Wallet, Provider, Contract } from "zksync-ethers"; +import * as hre from "hardhat"; +import { Deployer } from "@matterlabs/hardhat-zksync"; + +const RICH_WALLET_PK = "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"; + +describe("Greeter", function () { + it("Should return the new greeting once it's changed", async function () { + const provider = Provider.getDefaultProvider(); + + const wallet = new Wallet(RICH_WALLET_PK, provider); + const deployer = new Deployer(hre, wallet); + + const artifact = await deployer.loadArtifact("Greeter"); + const greeter = await deployer.deploy(artifact, ["Hi"]); + + expect(await greeter.greet()).to.eq("Hi"); + + const setGreetingTx = await greeter.setGreeting("Hola, mundo!"); + // wait until the transaction is mined + await setGreetingTx.wait(); + + expect(await greeter.greet()).to.equal("Hola, mundo!"); + }); +}); +``` + +To run the test file, execute: + + ::code-group + ```bash [npm] + npm test + ``` + + ```bash [yarn] + yarn test + ``` + + ```bash [pnpm] + pnpm test + ``` + + ```bash [bun] + bun run test + ``` + :: + +Well done! You've successfully run your first local tests with zkSync Era and `era_test_node`. + +--- + +## Troubleshooting + +If running `era_test_node run` provides the following error: + +```sh +“era_test_node” can’t be opened because Apple cannot check it for malicious software. +This software needs to be updated. 
Contact the developer for more information. +``` + +You may require the use of `sudo`. On macOS, the binary may need to have its quarantine attribute cleared: + +```sh +xattr -d com.apple.quarantine /usr/local/bin/era_test_node +``` diff --git a/content/00.build/60.test-and-debug/30.continuous-integration.md b/content/00.build/60.test-and-debug/30.continuous-integration.md new file mode 100644 index 00000000..ad216599 --- /dev/null +++ b/content/00.build/60.test-and-debug/30.continuous-integration.md @@ -0,0 +1,176 @@ +--- +title: Continuous Integration +description: Use a GitHub Action to integrate era-test-node into your CI/CD environment. +--- + +A GitHub Action is available for integrating `era-test-node` into your CI/CD environments. +This action offers high configurability and streamlines the process of testing your applications in an automated way. + +You can find the GitHub Action available in the [marketplace](https://github.com/marketplace/actions/era-test-node-action). + +::callout{icon="i-heroicons-light-bulb" color="blue"} +In CI tests, use `127.0.0.1` as the URL in hardhat.config.ts or for the provider to avoid '**Cannot connect to network**' errors. +:: + +## Configuration Options + +| Option | Description | Required | Default | Options | +|------------------|--------------------------|----------|--------------------------------|----------------------------------------------------------| +| `mode` | Operation mode. | No | `run` | `run`, `fork` | +| `network` | Network selection. | No | - | - | +| `forkAtHeight` | Block height to fork at. | No | - | - | +| `port` | Listening port. | No | `8011` | - | +| `showCalls` | Call debug visibility. | No | `none` | `none`, `user`, `system`, `all` | +| `showStorageLogs`| Storage log visibility. | No | `none` | `none`, `read`, `write`, `all` | +| `showVmDetails` | VM details visibility. | No | `none` | `none`, `all` | +| `showGasDetails` | Gas details visibility. 
| No | `none` | `none`, `all` | +| `resolveHashes` | Enable hash resolution. | No | `false` | - | +| `log` | Log filter level. | No | `info` | `debug`, `info`, `warn`, `error` | +| `logFilePath` | Path for the log file. | No | `era_test_node.log` | - | +| `target` | Target architecture. | No | `x86_64-unknown-linux-gnu` | `x86_64-unknown-linux-gnu`, `x86_64-apple-darwin`, `aarch64-apple-darwin` | +| `version` | Version of `era_test_node`. | No | `latest` | - | + +--- + +## Examples + +The following are examples of configuration for your GitHub Action. + +### Quickstart + +```yaml +name: Run Era Test Node Action + +on: + push: + branches: [main] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Run Era Test Node + uses: dutterbutter/era-test-node-action@latest +``` + +### Advanced + +With configuration options: + +```yaml +name: Run Era Test Node Action + +on: + push: + branches: [main] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Run Era Test Node + uses: dutterbutter/era-test-node-action@latest + with: + mode: "run" + showCalls: "user" + showStorageLogs: "read" + showVmDetails: "all" + showGasDetails: "all" + resolveHashes: "true" + log: "info" + logFilePath: "era_test_node.log" + target: "x86_64-unknown-linux-gnu" +``` + +With upload log file to artifacts: + +```yaml +name: Run Era Test Node Action + +on: + pull_request: + branches: [main] + workflow_dispatch: +jobs: + test: + name: unit-tests + strategy: + matrix: + platform: [ubuntu-latest] + runs-on: ${{ matrix.platform }} + + steps: + - name: Checkout Code + uses: actions/checkout@v3 + + - name: Run Era Test Node + uses: dutterbutter/era-test-node-action@latest + with: + mode: "fork" + network: "mainnet" + forkAtHeight: "1855248" + showCalls: "user" + showStorageLogs: "read" + showVmDetails: "all" + showGasDetails: "all" + resolveHashes: "true" + log: "info" + 
logFilePath: "era_test_node.log" + target: "x86_64-unknown-linux-gnu" + releaseTag: "latest" + + - name: Install Dependencies + run: yarn install + + - name: Run Tests + run: | + yarn test:contracts + + - name: Upload era_test_node log + uses: actions/upload-artifact@v3 + with: + name: era_test_node-log + path: era_test_node.log +``` + +With Fork: + +```yaml +name: Run Era Test Node Action + +on: + push: + branches: [main] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - name: Run Era Test Node + uses: dutterbutter/era-test-node-action@latest + with: + mode: "fork" + network: "mainnet" + forkAtHeight: "1855248" + showCalls: "user" + showStorageLogs: "read" + showVmDetails: "all" + showGasDetails: "all" + resolveHashes: "true" + log: "info" + logFilePath: "era_test_node.log" + target: "x86_64-unknown-linux-gnu" + releaseTag: "latest" +``` diff --git a/content/00.build/60.test-and-debug/40.hardhat.md b/content/00.build/60.test-and-debug/40.hardhat.md new file mode 100644 index 00000000..f8ce7293 --- /dev/null +++ b/content/00.build/60.test-and-debug/40.hardhat.md @@ -0,0 +1,347 @@ +--- +title: Hardhat +description: Learn how to test on era-test-node with Hardhat. +--- + +In the world of decentralized applications, the margin for error is remarkably narrow. +A single mistake in a contract can have catastrophic implications. +For those seeking an efficient method to test and refine their contracts, +this guide showcases how to utilize Hardhat and `era_test_node` for all testing needs. + +To test our contract, we are going to use Hardhat and `era_test_node` for rapid local development. +In our tests we're going to use `zksync-ethers` to interact with the `Greeter` contract, +and we'll use [Mocha](https://mochajs.org/) as our test runner. + +### Prerequisites + +- `zksync-cli` installed from the [zksync-cli section](/build/tooling/zksync-cli). +- `era_test_node` installed and running. 
See [In-memory Node](/build/test-and-debug/in-memory-node). + +--- +## Environment setup + +1. Create a new project with the required dependencies and boilerplate paymaster implementations: + + ```bash + zksync-cli create test-greeter + ``` + + Choose `Hardhat + Solidity` to setup the project repository. The contract for this guide exists under `/contracts/Greeter.sol`. + + Install dependencies: + + ::code-group + + ```bash [yarn] + yarn install + ``` + + ```bash [npm] + npm install + ``` + + ```bash [bun] + bun install + ``` + + :: + +1. Add the following additional dependencies: + + ::code-group + + ```bash [yarn] + yarn add -D @nomicfoundation/hardhat-chai-matchers @nomiclabs/hardhat-ethers + ``` + + ```bash [npm] + npm add @nomicfoundation/hardhat-chai-matchers @nomiclabs/hardhat-ethers --save-dev + ``` + + ```bash [bun] + bun add @nomicfoundation/hardhat-chai-matchers @nomiclabs/hardhat-ethers --dev + ``` + + :: + +1. Import `@nomicfoundation/hardhat-chai-matchers` into the `hardhat.config.ts` file: + + ```typescript [hardhat.config.ts] + import "@nomicfoundation/hardhat-chai-matchers"; + ``` + + The `@nomicfoundation/hardhat-chai-matchers` plugin adds Ethereum specific capabilities + to the [Chai](https://www.chaijs.com/) assertion library for testing smart contracts. + +1. Start `era_test_node`: + + ```bash + ./target/release/era_test_node run + ``` + +--- +## Run tests with Hardhat + +Under the `/test` directory there is a `main.test.ts` . The initial test checks if our `Greeter` contract returns the set greeting. 
+ +```typescript [/test/main.test.ts] +import { expect } from "chai"; +import { Wallet, Provider, Contract } from "zksync-ethers"; +import * as hre from "hardhat"; +import { Deployer } from "@matterlabs/hardhat-zksync"; +import { zkSyncTestnet } from "../hardhat.config"; + +const RICH_WALLET_PK = "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"; + +async function deployGreeter(deployer: Deployer): Promise<Contract> { + const artifact = await deployer.loadArtifact("Greeter"); + return await deployer.deploy(artifact, ["Hi"]); +} + +describe("Greeter", function () { + it("Should return the new greeting once it's changed", async function () { + const provider = new Provider(zkSyncTestnet.url); + + const wallet = new Wallet(RICH_WALLET_PK, provider); + const deployer = new Deployer(hre, wallet); + + const greeter = await deployGreeter(deployer); + + expect(await greeter.greet()).to.eq("Hi"); + + const setGreetingTx = await greeter.setGreeting("Hola, mundo!"); + // wait until the transaction is mined + await setGreetingTx.wait(); + + expect(await greeter.greet()).to.equal("Hola, mundo!"); + }); +}); +``` + +To run this test: + +::code-group + +```bash [yarn] +yarn test +``` + +```bash [npm] +npm test +``` + +```bash [bun] +bun run test +``` + +:: +You should see the following output: + +```sh + Greeter + ✔ Should return the new greeting once it's changed (174ms) + + 1 passing (174ms) +``` + +--- + +## Expand test coverage + +Our aim is comprehensive coverage. Here are the test scenarios we will cover: + +1. **Testing greet() function**: Check the returned greeting. +2. **Testing setGreeting() function**: Verify the ability to update greetings. +3. **Testing Insufficient Funds**: Ensure transactions fail without enough funds. +4. **Event Emission**: Ensure an event is emitted when changing the greeting. 
+ +Each of these test cases will rely on a common setup, +which involves creating a provider connected to the %%zk_testnet_name%%, initializing a wallet with a known private key, +and deploying the `Greeter` contract. + +Let's refactor our test file with the provided script: + +::drop-panel + ::panel{label="test/main.test.ts"} + ```typescript [main.test.ts] + import { expect } from "chai"; + import { Wallet, Provider, Contract } from "zksync-ethers"; + import * as hre from "hardhat"; + import { Deployer } from "@matterlabs/hardhat-zksync"; + import { zkSyncTestnet } from "../hardhat.config"; + + const RICH_WALLET_PK = "0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110"; + + // Deploy the Greeter contract + async function deployGreeter(deployer: Deployer): Promise<Contract> { + // Load the Greeter contract artifact + const artifact = await deployer.loadArtifact("Greeter"); + // Deploy the contract with an initial greeting + return await deployer.deploy(artifact, ["Hi"]); + } + + describe("Greeter", function () { + let greeter; + let wallet; + let deployer; + + // Initialize commonly used variables before running the tests + before(async function () { + // Create a provider connected to the zkSync testnet + const provider = new Provider(zkSyncTestnet.url); + + // Create a wallet instance using the rich wallet's private key + wallet = new Wallet(RICH_WALLET_PK, provider); + // Create a deployer instance for contract deployments + deployer = new Deployer(hre, wallet); + // Deploy the Greeter contract + greeter = await deployGreeter(deployer); + }); + + // Test the greet() function + it("Should return the new greeting once it's changed", async function () { + // Ensure the greet function returns the initial greeting after deployment + expect(await greeter.greet()).to.eq("Hi"); + }); + + // Test the setGreeting() function + it("Should set a new greeting and return it", async function () { + // Set a new greeting + const setGreetingTx = await 
greeter.setGreeting("Hola, mundo!"); + // Wait for the transaction to be confirmed + await setGreetingTx.wait(); + + // Ensure the greet function returns the newly set greeting + expect(await greeter.greet()).to.equal("Hola, mundo!"); + }); + + // Test for lack of funds (or other tx failures) + it("Should fail when insufficient funds", async function () { + // Create an empty wallet with no funds + const userWallet = Wallet.createRandom(); + // Connect the empty wallet to the greeter contract and attempt to set a new greeting + try { + await greeter.connect(userWallet).setGreeting("fail"); + // The following line should not be reached if the transaction fails + expect(true).to.equal(false); + } catch (e) { + // Expect an error to be thrown for the transaction + expect(e).to.exist; + } + }); + + // Test event emission + it("Should emit an event when the greeting is changed", async function () { + const newGreeting = "Bonjour, monde!"; + // Use the provided .emit method to test event emissions + await expect(greeter.setGreeting(newGreeting)).to.emit(greeter, "GreetingChanged").withArgs(newGreeting); + }); + }); + ``` + :: +:: + +To run this test: + +::code-group + +```bash [yarn] +yarn test +``` + +```bash [npm] +npm test +``` + +```bash [bun] +bun run test +``` + +:: + +You should see the following output: + +```sh + Greeter + ✔ Should return the new greeting once it's changed (211ms) + ✔ Should set a new greeting and return it (2682ms) + ✔ Should fail when insufficient funds (299ms) + ✔ Should emit an event when the greeting is changed (2939ms) + + 4 passing (6s) +``` + +## Understanding the test file + +Have a look at the `test/main.test.ts` file's imports: + +```typescript [test/main.test.ts] +import { expect } from "chai"; +import { Wallet, Provider, Contract } from "zksync-ethers"; +import * as hre from "hardhat"; +import { Deployer } from "@matterlabs/hardhat-zksync"; +import { zkSyncTestnet } from "../hardhat.config"; +``` + +This section imports all 
necessary utilities and configurations needed to run our tests.

- `expect` from Chai provides assertion functionalities for our tests.
- `Wallet`, `Provider`, and `Contract` from `zksync-ethers` help us with zkSync functionalities like creating wallets and interacting with contracts.
- `hre` and `Deployer` give us hardhat specific functionalities for deploying and interacting with our contract.
- `zkSyncTestnet` from our hardhat configuration provides network details of our running `era_test_node`.

#### Contract Deployment Utility

```javascript
async function deployGreeter(deployer: Deployer): Promise<Contract> { ... }
```

This utility function simplifies deploying the Greeter contract for our tests.

#### Main Test Suite

```javascript
describe('Greeter', function () {
  ...
});
```

Here, we've declared our main test suite.
Each test or nested suite inside provides specific scenarios or functionalities we want to test regarding the Greeter contract.

1. **Initialization**

   Before running any test, we initialize commonly used variables like the provider, wallet, deployer, and the greeter contract.

2. **Test greet() function**

   We check that the greet function returns the initial greeting of 'Hi' after deployment.

   ```javascript
   it("Should return the new greeting once it's changed", async function () { ... });
   ```

3. **Test setGreeting() function**

   We test that setting a new greeting updates the contract's state as expected.

   ```javascript
   it("Should set a new greeting and return it", async function () { ... });
   ```

4. **Test insufficient funds**

   Here, we simulate a scenario where an empty wallet (with no funds) tries to set a new greeting.
   We make use of the `connect` method on your `zksync-ethers` Contract object to connect it to a different account.

   ```javascript
   it("Should fail when insufficient funds", async function () { ... });
   ```

5. 
**Test event emission** + + We test the emission of an event when the greeting changes in the contract making use of the `hardhat-chai-matchers`. + + ```javascript + it("Should emit an event when the greeting is changed", async function () { ... }); + ``` diff --git a/content/00.build/60.test-and-debug/50.foundry.md b/content/00.build/60.test-and-debug/50.foundry.md new file mode 100644 index 00000000..ace8c07b --- /dev/null +++ b/content/00.build/60.test-and-debug/50.foundry.md @@ -0,0 +1,98 @@ +--- +title: Foundry +description: Learn how to test using Foundry for zkSync. +--- + +For instructions on how to install `foundry-zksync` please refer to the Foundry [Getting Started](/build/tooling/foundry/getting-started) page. + +`foundry-zksync`, a fork of Foundry, provides developers with a tailored testing framework designed specifically for zkSync environments. +Utilizing `forge test --zksync`, you can execute your smart contract tests efficiently. +Tests are written in Solidity, and the framework is designed to recognize any contract function prefixed with `test` as a test case. +By convention, tests are typically stored within the `test/` directory and have a `.t.sol` extension. + +::callout{icon="i-heroicons-light-bulb" color="blue"} +For more detailed documentation related to Foundry testing please refer to the official upstream [Foundry documentation](https://book.getfoundry.sh/forge/tests). +:: + +--- +## Cheatcodes + +Cheatcodes allow you to change the block number, your identity, and more. +`foundry-zksync` supports the most common Foundry cheatcodes. +For an exhaustive list of supported cheatcodes refer to the [Supported Cheatcodes for foundry-zksync](%%zk_git_repo_foundry-zksync%%/blob/main/SUPPORTED_CHEATCODES.md). 
+ +--- +## Writing Tests + +Tests are structured as Solidity contracts, + inheriting from the Forge Standard Library's `Test` contract for enhanced functionality, +which includes basic assertions and logging: + +```solidity +pragma solidity 0.8.10; + +import "forge-std/Test.sol"; + +contract ContractBTest is Test { + uint256 testNumber; + + function setUp() public { + testNumber = 42; + } + + function test_NumberIs42() public { + assertEq(testNumber, 42); + } + + function testFail_Subtract43() public { + testNumber -= 43; + } +} +``` + +#### Key Concepts + +- **`setUp`:** An optional function that runs before each test, used for initializing test conditions. +- **`test`:** Prefix for functions that are recognized as tests. These functions must either pass or revert to indicate success or failure, respectively. +- **`testFail`:** A prefix for test functions expected to revert. If such a function does not revert, the test is considered failed. + +--- +## Running Tests + +To initiate your tests, use the `forge test --zksync` command with the `--zksync` flag, or incorporate `vm.zkVm(true)` within your tests. +This command automatically locates and executes tests across your source directory. + +Here's an example of executing tests in a standard project setup: + +```bash +forge test --zksync + +Running 2 tests for test/Counter.t.sol:CounterTest +[PASS] testFuzz_SetNumber(uint256) (runs: 256, μ: 27553, ~: 28409) +[PASS] test_Increment() (gas: 28379) +Test result: ok. 2 passed; 0 failed; 0 skipped; finished in 96.80ms +``` + +### Filtering Tests + +You can run specific tests by filtering based on the contract or test names: + +```bash +forge test --match-contract CounterTest --match-test test_Increment --zksync +``` + +This command will execute only the tests within `CounterTest` that include `test_Increment` in their name. 
+ +Similarly, you can use `--match-path` to run tests in files that match a specific glob pattern: + +```bash +forge test --match-path test/Counter.t.sol --zksync +``` + +Inverse filters are available through `--no-match-contract`, `--no-match-test`, and `--no-match-path` flags. + +### Watch Mode + +To automatically re-run tests upon any file changes, use the `forge test --watch --run-all --zksync` command. + +--- diff --git a/content/00.build/60.test-and-debug/_dir.yml b/content/00.build/60.test-and-debug/_dir.yml new file mode 100644 index 00000000..6e645e40 --- /dev/null +++ b/content/00.build/60.test-and-debug/_dir.yml @@ -0,0 +1 @@ +title: Test and Debug diff --git a/content/00.build/65.developer-reference/00.index.md b/content/00.build/65.developer-reference/00.index.md new file mode 100644 index 00000000..c6f22118 --- /dev/null +++ b/content/00.build/65.developer-reference/00.index.md @@ -0,0 +1,59 @@ +--- +title: Getting Started +description: Kickstart your development journey with zkSync Era, covering everything from rollups to system contracts and fee structures. +--- + +Welcome to the zkSync Era Developer reference documentation! This guide is your starting point for +understanding the core components and advanced features of zkSync. It provides an essential +overview to help you effectively build on zkSync Era. + +::card-group + ::card + --- + title: Introduction to Rollups + icon: i-heroicons-scale-16-solid + to: /build/developer-reference/intro-rollups + --- + Explore the fundamentals of rollups for enhanced scalability and lower gas costs. + :: + ::card + --- + title: Ethereum Differences + icon: i-heroicons-adjustments-horizontal-16-solid + to: /build/developer-reference/ethereum-differences/evm-instructions + --- + Learn about the key distinctions between Ethereum Layer 1 and zkSync Era. 
+ :: + ::card + --- + title: Native Account Abstraction + icon: i-heroicons-user-circle-16-solid + to: /build/developer-reference/account-abstraction + --- + Utilize account abstraction to streamline user experiences and contract interactions. + :: + ::card + --- + title: zkSync Era Contracts + icon: i-heroicons-document-duplicate-16-solid + to: /build/developer-reference/era-contracts/l1-contracts + --- + Discover the zkSync Era L1 and system contracts. + :: + ::card + --- + title: zkSync Era Fee Model + icon: i-heroicons-currency-dollar-16-solid + to: /build/developer-reference/fee-model + --- + Understand the fee structure in zkSync to optimize transaction costs. + :: + ::card + --- + title: Bridging Assets + icon: i-heroicons-arrow-path-16-solid + to: /build/developer-reference/bridging-assets + --- + Facilitate asset transfers between Ethereum Layer 1 and zkSync Layer 2 efficiently + :: +:: diff --git a/content/00.build/65.developer-reference/10.intro-rollups.md b/content/00.build/65.developer-reference/10.intro-rollups.md new file mode 100644 index 00000000..9f48b050 --- /dev/null +++ b/content/00.build/65.developer-reference/10.intro-rollups.md @@ -0,0 +1,63 @@ +--- +title: Introduction to Rollups +description: Gain a comprehensive understanding of rollups. +--- + +## Introduction + +To better understand rollups we need to dive briefly into Ethereum and Layer 2 solutions. + +The Ethereum network is frequently congested, which results in slow transactions and increased gas prices. +While this has remained so for a long time, an improved solution is needed: one that will not put limits on the throughput, but instead, +achieve a high transaction rate without having to trade off security. That is where Layer 2 solutions shine. + +Layer 2 solutions are designed as an extension to Ethereum, and offer various solutions poised to be the critical scalability component to +the inherent network congestion on Ethereum. 
Covering all Layer 2 solutions is beyond the scope of this doc. +We will go through a brief explainer on rollups in this section. + +## What are rollups? + +Rollups are a recent development intended to increase the scalability of Ethereum by performing calculations off-chain, rolling many +transactions up into a single batch, and sending it to the main Ethereum chain in a single action. +Instead of submitting each transaction separately, rollup operators submit a summary of the required changes to represent all transactions +in a batch. + +To be able to work on a rollup, funds need to be locked on a smart contract on the Layer 1 blockchain. +This allows transactions to be processed without the overhead of all the data associated with performing a transaction on the main chain. +**Rollups significantly decrease associated transaction processing times and gas fees.** + +## Optimistic vs ZK rollups + +Currently, there are 2 types of rollups used to scale Ethereum. + +1. ZK Rollups (Zero-Knowledge Rollups) - eg: zkSync, Loopring, Starknet, Scroll etc +2. Optimistic Rollups - eg: Optimism, Arbitrum etc + +The main difference between ZK and Optimistic rollups is in the way this batch of transactions becomes <em>final</em>. + +### What are ZK rollups? + +In ZK rollups ('ZK' standing for zero-knowledge) the batch of transactions is verified for correctness on the Ethereum network. After the +verification passes, the batch of transactions is considered final like any other Ethereum transaction. This is achieved through the power +of cryptographic <em>validity proofs</em> (commonly called zero-knowledge proofs). With any batch of off-chain transactions, the ZK rollup +operator generates a proof of validity for this batch. Once the proof is generated, it is submitted to Ethereum to make the roll-up batch final. +In zkSync, this is done via a **SNARK**, succinct non-interactive argument of knowledge. + +### What are Optimistic rollups? 
+

Optimistic rollups, on the other hand, have no mechanism to prove the validity of the off-chain transactions. Instead, they are considered
“optimistic” because they assume off-chain transactions are valid unless proven otherwise. Hence, they rely on <em>fraud proof systems</em>, a
challenge to the submitted state to Ethereum. If such a challenge is submitted, the Optimistic rollup operator needs to show that the
state and transactions in question are actually valid. This is a cumbersome process, and requires watchers to make sure that the Optimistic
rollup operator is honest at all times.

## L1 and L2: what's the difference?

The term **Layer 1** (or **L1**) is used to refer to the underlying primary chain, such as the Ethereum network or Bitcoin. Layer 1
blockchains determine protocol rules and transaction finality, and perform the base-level functions of applications built upon them.

The term **Layer 2** (or **L2**) is used to describe an overlaying application or network that operates on top of the Layer 1 chain. These
are most often built to provide further scalability solutions by taking on a portion of transaction-based tasks to lighten the impact on the
layer 1 chain, quickening transaction times and lowering gas fees.

**zkSync Era is an L2, where L1 is the main Ethereum blockchain.** diff --git a/content/00.build/65.developer-reference/20.zksync-overview.md b/content/00.build/65.developer-reference/20.zksync-overview.md new file mode 100644 index 00000000..4f0ebb44 --- /dev/null +++ b/content/00.build/65.developer-reference/20.zksync-overview.md @@ -0,0 +1,80 @@ +--- +title: zkSync Era Overview +description: +--- + +## zkSync Era overview + +The general rollup workflow is as follows: + +- Users can receive, deposit, and transfer assets to each other. +- Users can withdraw assets under their control to an L1 address. 
+ +Rollup operation requires the assistance of an operator, who rolls transactions together, computes a zero-knowledge proof of the correct +state transition, and affects the state transition by interacting with the rollup +contract. To understand the design, we need to look into how zkSync rollup transactions work. + +zkSync operations are divided into rollup transactions (initiated inside rollup by a +rollup account) and priority operations (initiated on the mainchain by an Ethereum account). + +The zkSync rollup operation lifecycles are as follows: + +- A user creates a transaction or a priority operation. +- After processing this request, the operator creates a rollup operation and adds it to the block. +- Once the block is complete, the operator submits it to the zkSync smart contract +as a block commitment. Part of the logic of some rollup operations is checked by the smart contract. +- The proof for the block is submitted to the zkSync smart contract as block verification. If the verification succeeds, the new state is considered final. + +Furthermore, on zkSync, each L2 block will progress through the following four stages until it is final. + +- `Pending`: The transaction was received by the operator, but it has not been processed yet. +- `Processed`: The transaction is processed by the operator and is confirmed to be included in the next block. +- `Committed`: This indicates that the transaction data of this block has been +posted on Ethereum. It does not prove that it has been executed in a valid way, but it ensures the + availability of the block data. +- `Finalized`: This indicates that the SNARK validity proof for the transaction has +been submitted and verified by the smart contract. After this step, the transaction is considered to be final. + +The typical time for a transaction to go from `Processed` to `Finalized` is a couple of hours at the current stage. 
+

Please note that for developer convenience, we usually treat the `Processed` and
`Committed` states as a single stage called `Committed` since they have no difference from the UX/DevEx standpoints.

### The State of zkSync

The current version of zkSync Era solves the needs of most applications on Ethereum,
and with more features planned for release soon, zkSync Era will provide developers
with a design space to experiment with applications not possible on Ethereum today.
With this release, we are supporting the following features:

- Native support of ECDSA signatures: Unlike the first version of zkSync and other
ZK rollups, no special operation is required to register the user’s private key.
Any account can be managed in L2 with the same private key that is used for L1.
- Solidity 0.8.x support: Deploy your existing codebase with little to no changes required.
- With small exceptions, our Web3 API is fully compatible with Ethereum. This allows seamless integration with existing indexers, explorers, etc.
- Support for Ethereum cryptographic primitives: zkSync natively supports `keccak256`, `sha256`, and `ecrecover` via precompiles.
- Hardhat plugin: Enables easy testing and development of smart contracts on zkSync.
- L1 → L2 smart contract messaging: Allows developers to pass data from Ethereum to
smart contracts on zkSync, providing the required information to run various smart contracts.
- Native account abstraction: zkSync Era implements [account abstraction natively](/build/developer-reference/account-abstraction), which brings multiple UX improvements for all accounts.

## Highlights of zkSync Era

- Mainnet-like security with zero reliance on 3rd parties.
- Permissionless EVM-compatible smart contracts.
- Standard Web3 API.
- Preserving key EVM features, such as smart contract composability.
- Introducing new features, such as native account abstraction. 
+ +## zkSync in comparison + +zkSync [stands out remarkably](https://blog.matter-labs.io/evaluating-ethereum-l2-scaling-solutions-a-comparison-framework-b6b2f410f955) +in security and usability among existing L2 scaling solutions. +Thanks to the combination of cutting-edge cryptography and on-chain data +availability, ZK rollups (the core network of zkSync) are the only L2 scaling +solution that doesn't require any operational activity to keep the funds safe. + +For example, users can go offline and still be able to withdraw their assets safely +when they come back, even if the ZK rollup validators are no longer around. +For a comprehensive distinction between zkSync Era and Ethereum, read this [guide](/build/developer-reference/ethereum-differences/evm-instructions). diff --git a/content/00.build/65.developer-reference/25.best-practices.md b/content/00.build/65.developer-reference/25.best-practices.md new file mode 100644 index 00000000..1baea598 --- /dev/null +++ b/content/00.build/65.developer-reference/25.best-practices.md @@ -0,0 +1,121 @@ +--- +title: Security and best practices +description: +--- + +Before diving into development on zkSync Era, it's crucial to consider the following recommendations. These best +practices will help you optimize your code, ensure security, and align with the unique characteristics of zkSync Era. + +## Use `call` over `.send` or `.transfer` + +Avoid using `payable(addr).send(x)`/`payable(addr).transfer(x)` because the 2300 gas stipend may not be enough +for such calls, especially if it involves state changes that require a large amount of L2 gas for data. Instead, we recommend using `call`. 
+

Instead of:

```solidity
payable(addr).send(x) // or
payable(addr).transfer(x)
```

Use:

```solidity
(bool s, ) = addr.call{value: x}("");
require(s);
```

This converts the `send`/`transfer` functionality to `call` and [avoids potential security risks outlined here](https://consensys.net/diligence/blog/2019/09/stop-using-soliditys-transfer-now/).

While `.call` offers more flexibility compared to `.send` or `.transfer`, developers should be aware that `.call`
does not provide the same level of reentrancy protection as `.transfer`/`.send`. It's crucial to adhere to best
practices like the checks-effects-interactions pattern and/or use reentrancy guard protection to secure your
contracts against reentrancy attacks. It can help ensure the robustness and security of your smart contracts on the zkSync VM, even under unexpected conditions.

## Use the proxy pattern at the early stage of the protocol

zkSync Era is based on the zk-friendly VM. Thus, we offer
[a dedicated compiler](/zk-stack/components/compiler/toolchain)
responsible for transforming conventional Solidity and Vyper code into zkEVM bytecode.

While we have extensive test coverage to ensure EVM compatibility, issues may still appear.
We will implement the patches for these in a timely manner.

To integrate a compiler bug fix, you need to recompile and upgrade your smart contract. We recommend using the
Proxy pattern for a few months after your first deployment on zkSync Era, even if you plan to migrate to an immutable
contract in the future.

## Do not rely on EVM gas logic

zkSync Era has a distinctive gas logic compared to Ethereum. There are two main drivers:

- We have a state-diff-based data availability, which means that the price for the execution depends on the L1 gas price.
- zkSync VM has a different set of computational trade-offs compared to the standard computational model. In
practice, this means that the price for opcodes is different from that of Ethereum. 
Also, zkEVM contains a different set of +opcodes under the hood and so the “gas” metric of the same set of operations may be different on zkSync Era and on Ethereum. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Our fee model is being constantly improved and so it is highly recommended **NOT** to hardcode any constants since the fee +model changes in the future might be breaking for this constant. +:: + +## `gasPerPubdataByte` should be taken into account in development + +Due to the state diff-based fee model of zkSync Era, every transaction includes a constant called `gasPerPubdataByte`. + +Presently, the operator has control over this value. However, in EIP712 transactions, users also sign an upper bound +on this value, but the operator is free to choose any value up to that upper bound. Note, that even if the value +is chosen by the protocol, it still fluctuates based on the L1 gas price. Therefore, relying solely on gas is inadequate. + +A notable example is a Gnosis Safe’s `execTransaction` method: + +```solidity +// We require some gas to emit the events (at least 2500) after the execution and some to perform code until the execution (500) +// We also include the 1/64 in the check that is not send along with a call to counteract potential shortcomings because of EIP-150 +require(gasleft() >= ((safeTxGas * 64) / 63).max(safeTxGas + 2500) + 500, "GS010"); +// Use scope here to limit variable lifetime and prevent `stack too deep` errors +{ + uint256 gasUsed = gasleft(); + // If the gasPrice is 0 we assume that nearly all available gas can be used (it is always more than safeTxGas) + // We only subtract 2500 (compared to the 3000 before) to ensure that the amount passed is still higher than safeTxGas + success = execute(to, value, data, operation, gasPrice == 0 ? (gasleft() - 2500) : safeTxGas); + gasUsed = gasUsed.sub(gasleft()); + + // ... 
+
}
```

While the contract does enforce the correct `gasleft()`, it does not enforce the correct `gasPerPubdata`, since there
was no such parameter on Ethereum. This means that a malicious user could call this wallet when the `gasPerPubdata` is
high and make the transaction fail, hence making it spend artificially more gas than required.

This is the case for all relayer-like logic ported directly from Ethereum and so if you see your code relying on logic
like “the user should provide at least X gas”, then the `gasPerPubdata` should also be taken into account on zkSync Era.

For now, zkSync Era operators use honest values for ETH L1 price and `gasPerPubdata`, so it should not be an issue if
enough margin is added to the estimated gas. In order to prepare for the future decentralization of zkSync Era,
it is imperative that you update your contract.

## Use native account abstraction over `ecrecover` for validation

Use zkSync Era's native account abstraction support for signature validation instead of this function.

We recommend not relying on the fact that an account has an ECDSA private key, since the account may be governed by
multisig and use another signature scheme.

Read more about [zkSync Era Account Abstraction support](/build/developer-reference/account-abstraction).

## Use local testing environment

For optimal development and testing of your contracts, it is highly recommended to perform local testing before deploying
them to the mainnet. Local testing allows you to test your contracts in a controlled environment, providing benefits such as
reduced network latency and cost.

We provide [two different testing environments](/build/test-and-debug) designed for local testing purposes.
These tools allow you to simulate the zkSync network locally, enabling you to validate your contracts effectively. 
+ +By incorporating local testing into your development workflow, you can effectively verify the behavior and functionality of +your contracts in a controlled environment, ensuring a smooth deployment process to the mainnet. + +For detailed instructions on configuring the local testing environment and performing tests using Mocha and Chai, +refer to the dedicated [Testing](/build/test-and-debug) page. diff --git a/content/00.build/65.developer-reference/30.ethereum-differences/10.evm-instructions.md b/content/00.build/65.developer-reference/30.ethereum-differences/10.evm-instructions.md new file mode 100644 index 00000000..4f54a965 --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/10.evm-instructions.md @@ -0,0 +1,368 @@ +--- +title: EVM Instructions +description: +--- + +## `CREATE`, `CREATE2` + +On zkSync Era, contract deployment is performed using the hash of the bytecode, and the `factoryDeps` field of EIP712 +transactions contains the bytecode. The actual deployment occurs by providing the contract's hash to the +`ContractDeployer` system contract. + +To guarantee that `create`/`create2` functions operate correctly, the compiler must be aware of the bytecode of the deployed +contract in advance. The compiler interprets the calldata arguments as incomplete input for `ContractDeployer`, +as the remaining part is filled in by the compiler internally. The Yul `datasize` and `dataoffset` instructions +have been adjusted to return the constant size and bytecode hash rather than the bytecode itself. 
+ +The code below should work as expected: + +```solidity +MyContract a = new MyContract(); +MyContract a = new MyContract{salt: ...}(); +``` + +In addition, the subsequent code should also work, but it must be explicitly tested to ensure its intended functionality: + +```solidity +bytes memory bytecode = type(MyContract).creationCode; +assembly { + addr := create2(0, add(bytecode, 32), mload(bytecode), salt) +} +``` + +The following code will not function correctly because the compiler is not aware of the bytecode beforehand: + +```solidity +function myFactory(bytes memory bytecode) public { + assembly { + addr := create(0, add(bytecode, 0x20), mload(bytecode)) + } +} +``` + +Unfortunately, it's impossible to differentiate between the above cases during compile-time. As a result, we strongly +recommend including tests for any factory that deploys child contracts using `type(T).creationCode`. + +Since the deploy and runtime code is merged together on zkSync Era, we do not support `type(T).runtimeCode` and it +always produces a compile-time error. + +### Address derivation + +For zkEVM bytecode, zkSync Era uses a distinct address derivation method compared to Ethereum. 
The precise formulas
can be found in our SDK, as demonstrated below:

<!-- TODO: @dutterbutter update to reflect zkSync-ethers v6 -->

```typescript
export function create2Address(sender: Address, bytecodeHash: BytesLike, salt: BytesLike, input: BytesLike) {
  const prefix = ethers.utils.keccak256(ethers.utils.toUtf8Bytes("zksyncCreate2"));
  const inputHash = ethers.utils.keccak256(input);
  const addressBytes = ethers.utils.keccak256(ethers.utils.concat([prefix, ethers.utils.zeroPad(sender, 32), salt, bytecodeHash, inputHash])).slice(26);
  return ethers.utils.getAddress(addressBytes);
}

export function createAddress(sender: Address, senderNonce: BigNumberish) {
  const prefix = ethers.utils.keccak256(ethers.utils.toUtf8Bytes("zksyncCreate"));
  const addressBytes = ethers.utils
    .keccak256(ethers.utils.concat([prefix, ethers.utils.zeroPad(sender, 32), ethers.utils.zeroPad(ethers.utils.hexlify(senderNonce), 32)]))
    .slice(26);

  return ethers.utils.getAddress(addressBytes);
}
```

Since the bytecode differs from Ethereum as zkSync uses a modified version of the EVM, the address derived from the bytecode hash will also differ.
This means that the same bytecode deployed on Ethereum and zkSync will have
different addresses and the Ethereum address will still be available and unused on
zkSync. If and when the zkEVM reaches parity with the EVM, the address derivation
will be updated to match Ethereum and the same bytecode will have the same address
on both chains; bytecodes previously deployed to different addresses on zkSync could then also be
deployed to the same Ethereum-matching addresses on zkSync.

## `CALL`, `STATICCALL`, `DELEGATECALL`

For calls, you specify a memory slice to write the return data to, e.g. `out` and `outsize` arguments for
`call(g, a, v, in, insize, out, outsize)`. In EVM, if `outsize != 0`, the allocated memory will grow to `out + outsize`
(rounded up to whole words) regardless of the `returndatasize`. 
On zkSync Era, `returndatacopy`, similar to `calldatacopy`, +is implemented as a cycle iterating over return data with a few additional checks and triggering a panic if +`out + outsize > returndatasize` to simulate the same behavior as in EVM. + +Thus, unlike EVM where memory growth occurs before the call itself, on zkSync Era, the necessary copying of return data +happens only after the call has ended, leading to a difference in `msize()` and sometimes zkSync Era not panicking where +EVM would panic due to the difference in memory growth. + +```solidity +success := call(gas(), target, 0, in, insize, out, outsize) // grows to 'min(returndatasize(), out + outsize)' +``` + +```solidity +success := call(gas(), target, 0, in, insize, out, 0) // memory untouched +returndatacopy(out, 0, returndatasize()) // grows to 'out + returndatasize()' +``` + +Additionally, there is no native support for passing Ether on zkSync Era, so it is handled by a special system contract +called `MsgValueSimulator`. The simulator receives the callee address and Ether amount, performs all necessary balance +changes, and then calls the callee. + +## `MSTORE`, `MLOAD` + +Unlike EVM, where the memory growth is in words, on zkEVM the memory growth is counted in bytes. For example, if you write +`mstore(100, 0)` the `msize` on zkEVM will be `132`, but on the EVM it will be `160`. Note, that also unlike EVM which +has quadratic growth for memory payments, on zkEVM the fees are charged linearly at a rate of `1` erg per byte. + +The other thing is that our compiler can sometimes optimize unused memory reads/writes. This can lead to different `msize` +compared to Ethereum since fewer bytes have been allocated, leading to cases where EVM panics, but zkEVM will not due to +the difference in memory growth. + +## `CALLDATALOAD`, `CALLDATACOPY` + +If the `offset` for `calldataload(offset)` is greater than `2^32-33` then execution will panic. 
+

Internally on zkEVM, `calldatacopy(to, offset, len)` is just a loop with `calldataload` and `mstore` on each iteration.
That means that the code will panic if `2^32-32 + offset % 32 < offset + len`.

## `RETURN`, `STOP`

Constructors return the array of immutable values. If you use `RETURN` or `STOP` in an assembly block in the constructor on zkSync Era,
it will leave the immutable variables uninitialized.

```solidity
contract Example {
    uint immutable x;

    constructor() {
        x = 45;

        assembly {
            // The statements below are overridden by the zkEVM compiler to return
            // the array of immutables.

            // The statement below leaves the variable x uninitialized.
            // return(0, 32)

            // The statement below leaves the variable x uninitialized.
            // stop()
        }
    }

    function getData() external pure returns (string memory) {
        assembly {
            return(0, 32) // works as expected
        }
    }
}

```

## `TIMESTAMP`, `NUMBER`

For more information about blocks on zkSync Era, including the differences between `block.timestamp` and `block.number`,
check out the [blocks on zkSync Documentation](/zk-stack/concepts/blocks).

::callout{icon="i-heroicons-light-bulb"}
**Changes From the Previous Protocol Version**<br />
Modifications were performed on how certain block properties were implemented on
zkSync Era. For details on the changes performed visit the [announcement on GitHub](https://github.com/zkSync-Community-Hub/zkync-developers/discussions/87).
::

## `COINBASE`

Returns the address of the `Bootloader` contract, which is `0x8001` on zkSync Era.

## `DIFFICULTY`, `PREVRANDAO`

Returns a constant value of `2500000000000000` on zkSync Era.

## `BASEFEE`

This is not a constant on zkSync Era and is instead defined by the fee model. Most
of the time it is 0.25 gwei, but under very high L1 gas prices it may rise.

## `SELFDESTRUCT`

Considered harmful and deprecated in [EIP-6049](https://eips.ethereum.org/EIPS/eip-6049). 
+ +Always produces a compile-time error with the zkEVM compiler. + +## `CALLCODE` + +Deprecated in [EIP-2488](https://eips.ethereum.org/EIPS/eip-2488) in favor of `DELEGATECALL`. + +Always produces a compile-time error with the zkEVM compiler. + +## `PC` + +Inaccessible in Yul and Solidity `>=0.7.0`, but accessible in Solidity `0.6`. + +Always produces a compile-time error with the zkEVM compiler. + +## `CODESIZE` + +| Deploy code | Runtime code | +| --------------------------------- | ------------- | +| Size of the constructor arguments | Contract size | + +Yul uses a special instruction `datasize` to distinguish the contract code and constructor arguments, so we +substitute `datasize` with 0 and `codesize` with `calldatasize` in zkSync Era deployment code. This way when Yul calculates the +calldata size as `sub(codesize, datasize)`, the result is the size of the constructor arguments. + +```solidity +contract Example { + uint256 public deployTimeCodeSize; + uint256 public runTimeCodeSize; + + constructor() { + assembly { + deployTimeCodeSize := codesize() // return the size of the constructor arguments + } + } + + function getRunTimeCodeSize() external { + assembly { + runTimeCodeSize := codesize() // works as expected + } + } +} +``` + +## `CODECOPY` + +| Deploy code | Runtime code (old EVM codegen) | Runtime code (new Yul codegen) | +| -------------------------------- | ------------------------------ | ------------------------------ | +| Copies the constructor arguments | Zeroes memory out | Compile-time error | + +```solidity +contract Example { + constructor() { + assembly { + codecopy(0, 0, 32) // behaves as CALLDATACOPY + } + } + + function getRunTimeCodeSegment() external { + assembly { + // Behaves as 'memzero' if the compiler is run with the old (EVM assembly) codegen, + // since it is how solc performs this operation there. 
On the new (Yul) codegen + // `CALLDATACOPY(dest, calldatasize(), 32)` would be generated by solc instead, and + // `CODECOPY` is safe to prohibit in runtime code. + // Produces a compile-time error on the new codegen, as it is not required anywhere else, + // so it is safe to assume that the user wants to read the contract bytecode which is not + // available on zkEVM. + codecopy(0, 0, 32) + } + } +} +``` + +## `EXTCODECOPY` + +Contract bytecode cannot be accessed on zkEVM architecture. Only its size is accessible with both `CODESIZE` and `EXTCODESIZE`. + +`EXTCODECOPY` always produces a compile-time error with the zkEVM compiler. + +## `DATASIZE`, `DATAOFFSET`, `DATACOPY` + +Contract deployment is handled by two parts of the zkEVM protocol: the compiler front end and the system contract called `ContractDeployer`. + +On the compiler front-end the code of the deployed contract is substituted with its hash. The hash is returned by the `dataoffset` +Yul instruction or the `PUSH [$]` EVM legacy assembly instruction. The hash is then passed to the `datacopy` Yul instruction or +the `CODECOPY` EVM legacy instruction, which writes the hash to the correct position of the calldata of the call to `ContractDeployer`. + +The deployer calldata consists of several elements: + +| Element | Offset | Size | +| --------------------------- | ------ | ---- | +| Deployer method signature | 0 | 4 | +| Salt | 4 | 32 | +| Contract hash | 36 | 32 | +| Constructor calldata offset | 68 | 32 | +| Constructor calldata length | 100 | 32 | +| Constructor calldata | 132 | N | + +The data can be logically split into header (first 132 bytes) and constructor calldata (the rest). + +The header replaces the contract code in the EVM pipeline, whereas the constructor calldata remains unchanged. +For this reason, `datasize` and `PUSH [$]` return the header size (132), and the space for constructor arguments is allocated by **solc** on top of it. 
+ +Finally, the `CREATE` or `CREATE2` instructions pass 132+N bytes to the `ContractDeployer` contract, which makes all +the necessary changes to the state and returns the contract address or zero if there has been an error. + +If some Ether is passed, the call to the `ContractDeployer` also goes through the `MsgValueSimulator` just like ordinary calls. + +We do not recommend using `CREATE` for anything other than creating contracts with the `new` operator. However, a lot of contracts create contracts +in assembly blocks instead, so authors must ensure that the behavior is compatible with the logic described above. + +Yul example: + +```solidity +let _1 := 128 // the deployer calldata offset +let _2 := datasize("Callable_50") // returns the header size (132) +let _3 := add(_1, _2) // the constructor arguments begin offset +let _4 := add(_3, args_size) // the constructor arguments end offset +datacopy(_1, dataoffset("Callable_50"), _2) // dataoffset returns the contract hash, which is written according to the offset in the 1st argument +let address_or_zero := create(0, _1, sub(_4, _1)) // the header and constructor arguments are passed to the ContractDeployer system contract +``` + +EVM legacy assembly example: + +```solidity +010 PUSH #[$] tests/solidity/complex/create/create/callable.sol:Callable // returns the header size (132), equivalent to Yul's datasize +011 DUP1 +012 PUSH [$] tests/solidity/complex/create/create/callable.sol:Callable // returns the contract hash, equivalent to Yul's dataoffset +013 DUP4 +014 CODECOPY // CODECOPY statically detects the special arguments above and behaves like the Yul's datacopy +... +146 CREATE // accepts the same data as in the Yul example above +``` + +## `SETIMMUTABLE`, `LOADIMMUTABLE` + +zkEVM does not provide any access to the contract bytecode, so the behavior of immutable values is simulated with the system contracts. + +1. 
The deploy code, also known as constructor, assembles the array of immutables in the auxiliary heap. Each array element + consists of an index and a value. Indexes are allocated sequentially by `zksolc` for each string literal identifier allocated by `solc`. +2. The constructor returns the array as the return data to the contract deployer. +3. The array is passed to a special system contract called `ImmutableSimulator`, where it is stored in a mapping with + the contract address as the key. +4. In order to access immutables from the runtime code, contracts call the `ImmutableSimulator` to fetch a value using + the address and value index. In the deploy code, immutable values are read from the auxiliary heap, where they are still available. + +The element of the array of immutable values: + +```solidity +struct Immutable { + uint256 index; + uint256 value; +} +``` + +Yul example: + +```solidity +mstore(128, 1) // write the 1st value to the heap +mstore(160, 2) // write the 2nd value to the heap + +let _2 := mload(64) +let _3 := datasize("X_21_deployed") // returns 0 in the deploy code +codecopy(_2, dataoffset("X_21_deployed"), _3) // no effect, because the length is 0 + +// the 1st argument is ignored +setimmutable(_2, "3", mload(128)) // write the 1st value to the auxiliary heap array at index 0 +setimmutable(_2, "5", mload(160)) // write the 2nd value to the auxiliary heap array at index 32 + +return(_2, _3) // returns the auxiliary heap array instead +``` + +EVM legacy assembly example: + +```solidity +053 PUSH #[$] <path:Type> // returns 0 in the deploy code +054 PUSH [$] <path:Type> +055 PUSH 0 +056 CODECOPY // no effect, because the length is 0 +057 ASSIGNIMMUTABLE 5 // write the 1st value to the auxiliary heap array at index 0 +058 ASSIGNIMMUTABLE 3 // write the 2nd value to the auxiliary heap array at index 32 +059 PUSH #[$] <path:Type> +060 PUSH 0 +061 RETURN // returns the auxiliary heap array instead +``` diff --git 
a/content/00.build/65.developer-reference/30.ethereum-differences/20.nonces.md b/content/00.build/65.developer-reference/30.ethereum-differences/20.nonces.md new file mode 100644 index 00000000..4d311d28 --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/20.nonces.md @@ -0,0 +1,40 @@ +--- +title: Nonces +description: +--- + +In Ethereum, each account is associated with a unique identifier known as a nonce. +For externally owned accounts (EOAs), the nonce fulfills three key functions: it +prevents replay attacks on the network, ensures transactions are executed in the +intended sequence, and acts as a unique identifier in the formula for deriving addresses. The nonce is incremented after each transaction is executed. + +In the context of smart contracts, the nonce has a singular purpose: it determines +the address of a contract deployed from another contract. When a new contract is +created using the `create` or `create2` functions, the nonce is increased to signify +the deployment of a new contract. Unlike EOAs, which can only increment their nonce +by one per transaction, smart contracts have the ability to increase their nonce +multiple times within a single transaction. + +Conversely, zkSync features native account abstraction, allowing accounts to +leverage the nonce for both replay attack protection and address derivation of +created contracts. Given that accounts in zkSync can be smart contracts, they may +deploy several contracts in a single transaction. + +In order to maintain the expected and convenient use of a nonce in both transaction +validation and contract deployment contexts, zkSync introduces two different nonces: + +- Transaction nonce +- Deployment nonce + +The transaction nonce is used for the transaction validation, while the deployment +nonce is incremented in the event of contract deployment. 
This way, accounts may
send many transactions by following only one nonce value and the contract may deploy
many other contracts without conflicting with the transaction nonce.

There are also other minor differences between zkSync and Ethereum nonce management:

- Newly created contracts begin with a **deployment nonce** value of zero. This
contrasts with Ethereum, where, following the specifications of
[EIP-161](https://eips.ethereum.org/EIPS/eip-161), the nonce for newly created contracts starts at one.
- On zkSync, the deployment nonce is incremented only if the deployment succeeds.
On Ethereum, the nonce is updated on deployment even if the creation failed. diff --git a/content/00.build/65.developer-reference/30.ethereum-differences/30.libraries.md b/content/00.build/65.developer-reference/30.ethereum-differences/30.libraries.md new file mode 100644 index 00000000..cb6105de --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/30.libraries.md @@ -0,0 +1,14 @@ +--- +title: Libraries +description: +--- + +We rely on the **solc** optimizer for library inlining, so a library may only be used without deployment +if it has been inlined by the optimizer. + +The addresses of deployed libraries must be set in the project configuration. These addresses then replace their placeholders +in IRs: `linkersymbol` in Yul and `PUSHLIB` in EVM legacy assembly. + +All linking happens at compile-time. Deploy-time linking is not supported. + +For compiling non-inlinable libraries please refer to the documentation [here](/build/tooling/hardhat/compiling-libraries). 
diff --git a/content/00.build/65.developer-reference/30.ethereum-differences/40.pre-compiles.md b/content/00.build/65.developer-reference/30.ethereum-differences/40.pre-compiles.md new file mode 100644 index 00000000..97e795d1 --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/40.pre-compiles.md @@ -0,0 +1,12 @@ +--- +title: Precompiles +description: +--- + +Some EVM cryptographic precompiles (notably pairings and RSA) aren't currently available. However, pairing is +prioritized to allow deployment of both ZK Chains and protocols like Aztec/Dark Forest without modifications. + +Ethereum cryptographic primitives like `ecrecover`, `keccak256`, `sha256`, `ecadd` and `ecmul` are supported as precompiles. +No actions are required from your side as all the calls to the precompiles are done by the compilers under the hood. + +It's important to be aware that the gas costs and behaviors of these precompiles when invoked via delegatecall may differ from those on Ethereum. diff --git a/content/00.build/65.developer-reference/30.ethereum-differences/50.native-vs-eip4337.md b/content/00.build/65.developer-reference/30.ethereum-differences/50.native-vs-eip4337.md new file mode 100644 index 00000000..01d813ce --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/50.native-vs-eip4337.md @@ -0,0 +1,26 @@ +--- +title: Native AA vs EIP 4337 +description: +--- + +The native account abstraction of zkSync and Ethereum's EIP 4337 aim to enhance +accounts' flexibility and user experience, but they differ in critical aspects listed below: + +1. **Implementation Level**: zkSync's account abstraction is integrated at the +protocol level; however, EIP 4337 avoids the implementation at the protocol level. +2. **Account Types**: on zkSync Era, smart contract accounts and paymasters are +first-class citizens. Under the hood, all accounts (even EOAs) behave like smart +contract accounts; **all accounts support paymasters**. +3. 
**Transaction Processing**: EIP 4337 introduces a separate transaction flow for
smart contract accounts, which relies on a separate mempool for user operations, and
Bundlers - nodes that bundle user operations and send them to be processed by the
EntryPoint contract, resulting in two separate transaction flows. In contrast, on
zkSync Era there is a unified mempool for transactions from both Externally Owned
Accounts (EOAs) and smart contract accounts. On zkSync Era, the Operator takes on
the role of bundling transactions, irrespective of the account type, and sends them
to the Bootloader (similar to the EntryPoint contract), which results in a single
mempool and transaction flow.
4. **Paymasters support**: zkSync Era allows both EOAs and smart contract accounts
to benefit from paymasters thanks to its single transaction flow. On the other hand,
EIP 4337 does not support paymasters for EOAs because paymasters are only
implemented in the new transaction flow for smart contract accounts. diff --git a/content/00.build/65.developer-reference/30.ethereum-differences/60.contract-deployment.md b/content/00.build/65.developer-reference/30.ethereum-differences/60.contract-deployment.md new file mode 100644 index 00000000..b4169817 --- /dev/null +++ b/content/00.build/65.developer-reference/30.ethereum-differences/60.contract-deployment.md @@ -0,0 +1,169 @@ +--- +title: Contract Deployment +description: Overview of the differences in contract deployment. +--- + + +In order to maintain the same level of security as the L1, the zkSync operator is
required to publish the code for each contract it deploys on the Ethereum chain.
However, if multiple contracts are deployed using the same code, the operator only
needs to publish it on Ethereum once. While the initial deployment of contracts can
be relatively expensive, utilizing contract factories that deploy contracts with
the same code multiple times can lead to huge savings compared to the L1. 
+

These specific requirements ensure that the process of deploying smart contracts on
zkEVM complies with a crucial rule: _the operator must be aware of the contract's
code before deployment_. Consequently, deploying contracts can only be accomplished
through EIP712 transactions, with the `factory_deps` field containing the bytecode provided.

[Learn more about EIP712 transactions here](/zk-stack/concepts/transaction-lifecycle#eip-712-0x71).

## Ethereum / zkSync differences in contract deployment

**How deploying contracts works on Ethereum.**

To deploy a contract on Ethereum, a user sends a transaction to the zero address
(`0x000...000`) with the `data` field of the transaction equal to the contract
bytecode concatenated with the constructor parameters.

**How deploying contracts works on zkSync.**

To deploy a contract on zkSync Era, a user calls the `create` function of the
[ContractDeployer system contract](/build/developer-reference/era-contracts/system-contracts)
providing the hash of the contract to be published, as well as the constructor
arguments. The contract bytecode itself is supplied in the `factory_deps` field of
the transaction (as it's an [EIP712 transaction](/zk-stack/concepts/transaction-lifecycle#eip-712-0x71)). If the contract is a factory
(i.e. it can deploy other contracts), these contracts' bytecodes should be included
in the `factory_deps` as well.

We recommend using the [hardhat-zksync-deploy](/build/tooling/hardhat/getting-started) plugin to
simplify the deployment process. It provides classes
and methods to take care of all the deployment requirements, like generating the
[bytecode hash of the contract](#contract-size-limit-and-format-of-bytecode-hash).

### Note on `factory_deps`

You might wonder how validators obtain the preimage of the bytecode hashes
necessary to execute the code. This is where the concept of factory dependencies,
or factory_deps for short, comes into play. 
Factory dependencies refer to a list of +bytecode hashes whose corresponding preimages were previously revealed on the L1 +(where data is always available). + +Under the hood, zkSync does not store bytecodes of contracts in its state tree, but +[specially formatted hashes of the bytecodes](#contract-size-limit-and-format-of-bytecode-hash). You can see that the +[ContractDeployer](/build/developer-reference/era-contracts/system-contracts) system contract accepts +the bytecode hash of the deployed contract and not its bytecode. However, for +contract deployment to succeed, the operator needs to know the bytecode. The +`factory_deps` field of the transaction is used for this reason: it contains the +bytecodes that should be known to the operator for this transaction to succeed. +Once the transaction succeeds, these bytecodes are published on L1 and are considered "known" to the operator forever. + +Some examples of usage are: + +- The obvious one is when you deploy a contract, you need to provide its code in the `factory_deps` field. +- On zkSync, factories (i.e. contracts that can deploy other contracts) do not +store bytecodes of their dependencies, i.e. contracts that they can deploy. They +only store their hashes. That's why you need to include _all_ the bytecodes of the +dependencies in the `factory_deps` field. + +Both of these examples are already seamlessly done under the hood by our +[hardhat-zksync-deploy](/build/tooling/hardhat/getting-started). + +Note that the factory deps do not necessarily have to be used by the transaction in +any way. These are just markers that these bytecodes should be published on L1 with +this transaction. If your contract contains a lot of various factory dependencies +and they do not fit inside a single L1 block, you can split the list of factory +dependencies between multiple transactions. + +For example, let's say that you want to deploy contract `A` which can also deploy +contracts `B` and `C`. 
This means that you will have three factory dependencies for +your deployment transaction: `A`,`B` and `C`. If the pubdata required to publish +all of them is too large to fit into one block, you can send a dummy transaction +with only factory dependencies `A` and `B` (assuming their combined length is small +enough) and do the actual deploy with a second transaction while providing the +bytecode of contract `C` as a factory dependency for it. Note that if some contract +_on its own_ is larger than the allowed limit per block, this contract has to be split into smaller ones. + +### Contract size limit and format of bytecode hash + +Each zkEVM bytecode must adhere to the following format: + +- Its length must be divisible by 32. +- Its length in words (32-byte chunks) should be odd. In other words, `bytecodeLength % 64 == 32`. +- There is a VM limit, the bytecode can not be more than `2^16` 32-byte words, i.e. `2^21` bytes. +- The bootloader has a memory limit for supplying pubdata of 450999 bytes, +therefore limiting the contract size to it as well. This limit is valid for +Validium ZK Chains, that don’t have to publish the bytecode to the base layer. +- For rollups that must publish the deployed bytecode to the base layer (e.g. +Ethereum), there is an additional pubdata limit, which is normally smaller. By +default, for each batch, this limit is set to 100000 bytes for ZK Chains using +calldata DA, or 120000\*number_of_blobs, for ZK Chains using EIP-4844 blobs. + +The 32-byte hash of the bytecode of a zkSync contract is calculated in the following way: + +- The first 2 bytes denote the version of bytecode hash format and are currently equal to `[1,0]`. +- The second 2 bytes denote the length of the bytecode in 32-byte words. +- The rest of the 28-byte (i.e. 28 low big-endian bytes) are equal to the last 28 bytes of the `sha256` hash of the contract's bytecode. + +## Smart contract security + +Smart contract security is critical. 
A single vulnerability in a smart contract can +lead to loss of funds. Make sure your contracts are secure against common threats. + +A common Solidity smart contract attack is reentrancy. This threat exploits +vulnerabilities in contract code that allow an attacker to repeatedly call a function that withdraws funds. + +Auditing smart contracts for security holes prevents theft and other malicious +activities. An audit involves a thorough review of the contract's code and its +underlying logic to identify any vulnerabilities or weaknesses that could be +exploited by attackers. Auditors look for things like buffer overflows, integer +overflows, and other types of security issues that can lead to the loss of assets +or other unwanted outcomes. This review process should include both manual and +automated testing to ensure that all vulnerabilities are identified. + +The process of auditing a smart contract should be carried out by experts who have +the necessary knowledge and experience to identify potential security risks. Investing in a thorough audit can help prevent security breaches and protect +investors and users from losses, reputation damage, and legal issues. Therefore, +it's essential to prioritize smart contract security and take proactive measures to +ensure that they are thoroughly audited for security holes before deploying your +smart contract on zkSync Era network. + +For detailed information on smart contract vulnerabilities and security best practices, refer to the following resources: + +- [Cyfrin Updraft Security & Auditing Curriculum](https://updraft.cyfrin.io/courses/security). +- [Consensys smart contract best practices](https://consensys.github.io/smart-contract-best-practices/). +- [Solidity docs security considerations](https://docs.soliditylang.org/en/latest/security-considerations.html). 
+- [Security considerations and best practices on zkSync](/build/developer-reference/best-practices) + +### Differences in `create()` behaviour + +To facilitate [support for account abstraction](/build/developer-reference/account-abstraction), zkSync splits the nonce of each account +into two parts: the deployment nonce and the transaction nonce. The deployment nonce represents the +number of contracts the account has deployed using the `create()` opcode, while the +transaction nonce is used for protecting against replay attacks for transactions. + +This distinction implies that, while the nonce on zkSync behaves similarly to +Ethereum for smart contracts, calculating the address of a deployed contract for +externally owned accounts (EOAs) is not as straightforward. + +On Ethereum, it can be safely determined using the formula `hash(RLP[address, +nonce])`. However, on zkSync, it is advisable to wait until the contract is +deployed and catch the `ContractDeployed` event emitted by the +[ContractDeployer](/build/developer-reference/era-contracts/system-contracts), which provides the address +of the newly deployed contract. The SDK handles all of these processes in the background to simplify the workflow. + +To have a deterministic address, you should use the `create2` method from +[ContractDeployer](/build/developer-reference/era-contracts/system-contracts). It is available for EOAs as well. + +## Deploying contracts from L1 + +Deploying contracts on zkSync Era is also possible via L1-L2 communication. + +The [interface](https://github.com/matter-labs/era-contracts/blob/main/l1-contracts/contracts/zksync/interfaces/IZkSync.sol) +for submitting L1->L2 transactions accepts +the list of all the factory dependencies required for this particular transaction. +The logic for working with them is the same as for the default L2 deployments. 
The +only difference is that since the user has already published the full preimage for +the bytecodes on L1, there is no need to publish these bytecodes again on L1. + +To learn more about L1-L2 communication on zkSync Era, visit [this section of the docs](/build/developer-reference/l1-l2-interoperability). diff --git a/content/00.build/65.developer-reference/40.account-abstraction/10.index.md b/content/00.build/65.developer-reference/40.account-abstraction/10.index.md new file mode 100644 index 00000000..93380acb --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/10.index.md @@ -0,0 +1,34 @@ +--- +title: Introduction +description: Discover how zkSync native Account Abstraction enhances transaction flexibility and user experience. +--- + +On Ethereum there are two types of accounts: + +- [externally owned accounts (EOAs)](https://ethereum.org/en/developers/docs/accounts/#externally-owned-accounts-and-key-pairs) +- [contracts accounts](https://ethereum.org/en/developers/docs/accounts/#contract-accounts) + +::callout +To better understand this page, we recommend you take some time to first read a guide on [accounts](https://ethereum.org/en/developers/docs/accounts/). +:: + +The former type is the only one that can initiate transactions, +while the latter is the only one that can implement arbitrary logic. For some +use-cases, like smart-contract wallets or privacy protocols, this difference can +create a lot of friction. + +As a result, such applications require L1 relayers, e.g. an EOA to help facilitate +transactions from a smart-contract wallet. + +Accounts in zkSync Era can initiate transactions, like an EOA, but can also have +arbitrary logic implemented in them, like a smart contract. This feature, called +"account abstraction" (AA), aims to resolve the issues described above. + +Native Account Abstraction on zkSync Era fundamentally changes how accounts operate +by introducing the concept of Smart Accounts and Paymasters. 
Smart Accounts are +fully programmable, allowing for various customizations such as signature schemes, +native multi-sig capabilities, spending limits, and application-specific restrictions. + +Paymasters, conversely, can sponsor transactions for users, enabling users to pay +transaction fees in ERC20 tokens. This innovative approach to account management +significantly enhances user experience, security, and flexibility, paving the way for broader adoption of blockchain technology. diff --git a/content/00.build/65.developer-reference/40.account-abstraction/20.design.md b/content/00.build/65.developer-reference/40.account-abstraction/20.design.md new file mode 100644 index 00000000..08a93acb --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/20.design.md @@ -0,0 +1,203 @@ +--- +title: Design +description: Overview of zkSync's account abstraction design, focusing on enhancing transaction efficiency and user experience. +--- + +The account abstraction protocol on zkSync is very similar to [EIP4337](https://eips.ethereum.org/EIPS/eip-4337), +though our protocol is still different for the sake of efficiency and better UX. + +## Keeping nonces unique + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +- The current model does not allow custom wallets to send multiple transactions at the same time +and maintain deterministic ordering. +- For EOAs, nonces are expected to grow sequentially; while for custom accounts the order of transactions cannot be guaranteed. +- In the future, we plan to switch to a model where accounts can choose between sequential or arbitrary nonce-ordering. +:: + +One of the important invariants of every blockchain is that each transaction has a +unique hash. Holding this property with an arbitrary account abstraction is not +trivial, though accounts can, in general, accept multiple identical transactions. 
+Even though these transactions would be technically valid by the rules of the +blockchain, violating hash uniqueness would be very hard for indexers and other tools to process. + +There needs to be a solution on the protocol level that is both cheap for users and +robust in case of a malicious operator. One of the easiest ways to ensure that +transaction hashes do not repeat is to have a pair (sender, nonce) always unique. + +The following protocol is used: + +- Before each transaction starts, the system queries the +[NonceHolder](/build/developer-reference/era-contracts/system-contracts#nonceholder) to check whether the provided nonce has already been used or not. +- If the nonce has not been used yet, the transaction validation is run. The provided nonce is expected to be marked as "used" during this time. +- After the validation, the system checks whether this nonce is now marked as used. + +Users will be allowed to use any 256-bit number as nonce and they can put any +non-zero value under the corresponding key in the system contract. This is already supported by the protocol, but not on the server side. + +More documentation on various interactions with the `NonceHolder` system contract +as well as tutorials will be available once support on the server side is released. +For now, it is recommended to only use the `incrementMinNonceIfEquals` method, +which practically enforces the sequential ordering of nonces. + +## Standardizing transaction hashes + +In the future, it is planned to support efficient proofs of transaction inclusion +on zkSync. This would require us to calculate the transaction's hash in the +[bootloader](/zk-stack/components/zksync-evm/bootloader). Since these +calculations won't be free to the user, it is only fair to include the +transaction's hash in the interface of the AA methods (in case the accounts may +need this value for some reason). 
That's why all the methods of the `IAccount` and +`IPaymaster` interfaces, which are described below, contain the hash of the +transaction as well as the recommended signed digest (the digest that is signed by +EOAs for this transaction). + +## Interfaces + +### IAccount interface + +Each account is recommended to implement the +[IAccount](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/IAccount.sol) interface. + +It contains the following five methods: + +- `validateTransaction` is mandatory and will be used by the system to determine if +the AA logic agrees to proceed with the transaction. In case the transaction is not +accepted (e.g. the signature is wrong) the method should revert. In case the call +to this method succeeds, the implemented account logic is considered to accept the +transaction, and the system will proceed with the transaction flow. +- `executeTransaction` is mandatory and will be called by the system after the fee +is charged from the user. This function should perform the execution of the transaction. +- `payForTransaction` is optional and will be called by the system if the +transaction has no paymaster, i.e. the account is willing to pay for the +transaction. This method should be used to pay for the fees by the account. Note, +that if your account will never pay any fees and will always rely on the [paymaster](paymasters) feature, you don't have to implement this method. +This method must send at least `tx.gasprice * tx.gasLimit` ETH to the [bootloader](/zk-stack/components/zksync-evm/bootloader) address. +- `prepareForPaymaster` is optional and will be called by the system if the +transaction has a paymaster, i.e. there is a different address that pays the +transaction fees for the user. This method should be used to prepare for the +interaction with the paymaster. 
One of the notable +[examples](paymasters#approval-based-paymaster-flow) where it can be helpful is to approve the ERC-20 +tokens for the paymaster. +- `executeTransactionFromOutside`, technically, is not mandatory, but it is _highly +encouraged_, since there needs to be some way, in case of priority mode (e.g. if +the operator is unresponsive), to be able to start transactions from your account +from "outside" (basically this is the fallback to the standard Ethereum approach, where an EOA starts transaction from your smart contract). + +### IPaymaster interface + +Like in EIP4337, our account abstraction protocol supports paymasters: accounts +that can compensate for other accounts' transactions execution. You can read more about them [here](paymasters). + +Each paymaster should implement the +[IPaymaster](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/IPaymaster.sol) interface. + +It contains the following two methods: + +- `validateAndPayForPaymasterTransaction` is mandatory and will be used by the +system to determine if the paymaster approves paying for this transaction. If the +paymaster is willing to pay for the transaction, this method must send at least +`tx.gasprice * tx.gasLimit` to the operator. It should return the `context` that +will be one of the call parameters to the `postTransaction` method. +- `postTransaction` is optional and is called after the transaction executes. Note +that unlike EIP4337, there _is no guarantee that this method will be called_. In +particular, this method won't be called if the transaction fails with `out of gas` +error. 
It takes four parameters: + + - the context returned by `validateAndPayForPaymasterTransaction`, + - the transaction itself, + - a flag that indicates whether the transaction execution succeeded, + - the maximum amount of gas the paymaster might be refunded with + +## Reserved fields of the `Transaction` struct + +Note that each of the methods above accept the +[Transaction](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/libraries/TransactionHelper.sol) struct. + +While some of its fields are self-explanatory, there are also 6 `reserved` fields, +the meaning of each is defined by the transaction's type. We decided to not give +these fields names, since they might be unneeded in some future transaction types. For now, the convention is: + +- `reserved[0]` is the nonce. +- `reserved[1]` is `msg.value` that should be passed with the transaction. + +## Transaction Flow + +Each transaction is processed through the following stages: + +### Validation + +During validation, the system determines if the transaction is acceptable. If the +transaction fails at any validation point, no fees are charged to the account, and the transaction cannot be included in a block. + +#### Steps in the Validation Process + +1. **Nonce Verification**: The system verifies that the transaction's nonce has not +been previously used. Further details on maintaining nonce uniqueness can be found [here](#keeping-nonces-unique). + +2. **Transaction Validation**: The `validateTransaction` method on the account is +invoked. If this method executes successfully without reverting, the process moves to the next step. + +3. **Nonce Marking**: Post-validation, the system marks the nonce of the transaction as used. + +4. **Fee Handling**: + - **Standard Transactions**: The `payForTransaction` method is called on the account. If this method does not revert, the transaction proceeds. 
+ - **Paymaster Transactions**: Initially, the `prepareForPaymaster` method is called on the sender. If successful, it is followed by the + `validateAndPayForPaymasterTransaction` method on the paymaster. If neither method reverts, the process moves forward. + +5. **Funds Verification**: The system ensures that the bootloader has received at +least `tx.gasPrice * tx.gasLimit` ETH. If the required funds are present, the transaction is deemed verified and is ready for the next step. + +### Execution + +The execution step is responsible for performing the actual transaction operations +and refunding any unused gas to the user. Even if this step results in a revert, the transaction remains valid and is included in the block. + +#### Steps in the Execution Process + +1. **Execute Transaction**: The `executeTransaction` method on the account is called to carry out the transaction. + +2. **Paymaster Post-Transaction Handling** (Applicable only if a paymaster is +involved): The `postTransaction` method on the paymaster is invoked. This method +typically handles the refund of unused gas to the sender, especially in scenarios +where the paymaster facilitates fee payment in ERC-20 tokens. + +### Fees + +The handling of transaction fees varies between different protocols, as illustrated by EIP-4337 and zkSync Era. + +#### Gas Limits in EIP-4337 +EIP-4337 defines three types of gas limits to manage the costs associated with different transaction stages: + +- **`verificationGas`**: Covers the gas required for transaction verification. +- **`executionGas`**: Allocates gas for the execution of the transaction. +- **`preVerificationGas`**: Specifies the gas used prior to the main verification process. + +#### Unified Gas Limit in zkSync Era +Contrastingly, zkSync Era simplifies the fee structure by using a single `gasLimit` +field for all transaction-related costs. This unified `gasLimit` must be adequately +set to cover: + +- Verification of the transaction. 
+- Payment of the fee, including any ERC-20 transfers. +- Execution of the transaction itself. + +#### Estimating Gas +By default, the `estimateGas` function calculates the required gas amount and +includes an additional constant. This constant accounts for fee payment and signature verification for Externally Owned Account (EOA) transactions. + +## Using the `SystemContractsCaller` library + +For the sake of security, both `NonceHolder` and the `ContractDeployer` system +contracts can only be called with a special `isSystem` flag. You can read more about it [here](/build/developer-reference/era-contracts/system-contracts#protected-access-to-some-of-the-system-contracts). + +To make a call with this flag, the `systemCall`/`systemCallWithPropagatedRevert`/`systemCallWithReturndata` methods of the +[SystemContractsCaller](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/libraries/SystemContractsCaller.sol) +library should be used. + +Using this library is practically a must when developing custom accounts since this +is the only way to call non-view methods of the `NonceHolder` system contract. +Also, you will have to use this library if you want to allow users to deploy +contracts of their own. You can use the +[implementation](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/DefaultAccount.sol) of the EOA account as a reference. diff --git a/content/00.build/65.developer-reference/40.account-abstraction/30.extending-4337.md b/content/00.build/65.developer-reference/40.account-abstraction/30.extending-4337.md new file mode 100644 index 00000000..af52cf41 --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/30.extending-4337.md @@ -0,0 +1,41 @@ +--- +title: Extending EIP-4337 +description: Overview of the extensions to zkSync's native Account Abstraction from EIP4337. 
+--- + +To provide DoS protection for the operator, EIP4337 imposes several +[restrictions](https://eips.ethereum.org/EIPS/eip-4337#simulation) on the validation step of the +account. + +Most of them, especially those regarding the forbidden opcodes, are still relevant. However, several restrictions have been lifted for better UX. + +### Extending the allowed opcodes + +- It is allowed to `call`/`delegateCall`/`staticcall` contracts that were already +deployed. Unlike Ethereum, we have no way to edit the code that was deployed or +delete the contract via selfdestruct, so we can be sure that the code during the +execution of the contract will be the same. + +### Extending the set of slots that belong to a user + +In the original EIP, the `validateTransaction` step of the AA allows the account to +read only the storage slots of its own. However, there are slots that +_semantically_ belong to that user but are actually located on another contract’s +addresses. A notable example is `ERC20` balance. + +This limitation provides DDoS safety by ensuring that the slots used for validation +by various accounts _do not overlap_, so there is no need for them to _actually_ +belong to the account’s storage. + +To enable reading the user's ERC20 balance or allowance on the validation step, the +following types of slots will be allowed for an account with address `A` on the validation step: + +1. Slots that belong to address `A`. +2. Slots `A` on any other address. +3. Slots of type `keccak256(A || X)` on any other address. (to cover `mapping(address => value)`, which is usually used for balance in ERC20 tokens). + +### What could be allowed in the future? + +In the future, we might even allow time-bound transactions, e.g. allow checking +that `block.timestamp <= value` if it returned `false`, etc. This would require +deploying a separate library of such trusted methods, but it would greatly increase the capabilities of accounts. 
diff --git a/content/00.build/65.developer-reference/40.account-abstraction/40.building-smart-accounts.md b/content/00.build/65.developer-reference/40.account-abstraction/40.building-smart-accounts.md new file mode 100644 index 00000000..8fe3bf54 --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/40.building-smart-accounts.md @@ -0,0 +1,82 @@ +--- +title: Building Smart Accounts +description: Discover the process of building custom smart accounts. +--- + +To build custom accounts on our platform, developers must implement specific +interfaces and follow our recommended best practices for account deployment and management. + +### Interface Implementation + +Every custom account should implement the [IAccount](design#iaccount-interface) +interface. You can find an example of a typical account implementation, resembling +standard Externally Owned Accounts (EOA) on Ethereum, in the +[DefaultAccount.sol](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/DefaultAccount.sol) on GitHub. + +This implementation returns an empty value when called by an external address, which may not be the desired behavior for your custom account. + +### EIP1271 Integration + +For smart wallets, we highly encourage the implementation of the [EIP1271](https://eips.ethereum.org/EIPS/eip-1271) signature-validation scheme. +This standard is endorsed by the zkSync team and is integral to our signature-verification library. + +### Deployment Process + +Deploying account logic follows a process similar to deploying a standard smart +contract. However, to distinguish smart contracts that are not intended to be +treated as accounts, use the `createAccount`/`create2Account` methods of the +deployer system contract instead of `create`/`create2`. 
+ +#### Example Using `zksync-ethers` SDK (v5) + +```ts +import { ContractFactory } from "zksync-ethers"; + +const contractFactory = new ContractFactory(abi, bytecode, initiator, "createAccount"); +const aa = await contractFactory.deploy(...args); +await aa.deployed(); +``` + +### Verification Step Limitations + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +Currently, the verification rules for custom accounts are not fully enforced, which might change in the future: + +- Accounts must only interact with slots that belong to them. +- Context variables (e.g., `block.number`) are prohibited in account logic. +- Accounts must increment the nonce by 1 to maintain hash collision resistance. + +:: + +These limitations are not yet enforceable at the circuit/VM level and do not apply to L1->L2 transactions. + +### Nonce Management + +Both transaction and deployment nonces are consolidated within the +[NonceHolder](/build/developer-reference/era-contracts/system-contracts#nonceholder) system contract for optimization. +Use the [incrementMinNonceIfEquals](https://github.com/matter-labs/era-contracts/blob/6250292a98179cd442516f130540d6f862c06a16/system-contracts/contracts/NonceHolder.sol#L110) +function to safely increment your account's nonce. + +### Sending Transactions + +Currently, only EIP712 formatted transactions are supported for sending from custom +accounts. Transactions must specify the `from` field as the account's address and +include a `customSignature` in the `customData`. + +#### Example Transaction Submission + +```ts +import { utils } from "zksync-ethers"; + +// Here, `tx` is a `TransactionRequest` object from `zksync-ethers` SDK. +// `zksyncProvider` is the `Provider` object from `zksync-ethers` SDK connected to the ZKSync network. 
+tx.from = aaAddress; +tx.customData = { + ...tx.customData, + customSignature: aaSignature, +}; +const serializedTx = utils.serialize({ ...tx }); + +const sentTx = await zksyncProvider.sendTransaction(serializedTx); +``` diff --git a/content/00.build/65.developer-reference/40.account-abstraction/50.paymasters.md b/content/00.build/65.developer-reference/40.account-abstraction/50.paymasters.md new file mode 100644 index 00000000..f1422f85 --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/50.paymasters.md @@ -0,0 +1,156 @@ +--- +title: Paymasters +description: Overview of the different paymaster flows and use cases. +--- + +Paymasters are specialized accounts designed to subsidize transaction fees for +users, enhancing usability and flexibility within protocols. They also facilitate +the payment of fees using ERC20 tokens, instead of the default ETH. + +## Interacting with Paymasters + +To utilize a paymaster, users must specify a non-zero `paymaster` address in their +EIP712 transaction, accompanied by relevant data in the `paymasterInput` field. + +### Paymaster Verification Rules + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +- Verification rules are not fully enforced yet. +- Paymasters that do not comply with these rules may cease to function correctly in the future. + +:: + +To mitigate potential DoS attacks by malicious paymasters, a reputation scoring system similar to +[EIP4337](https://eips.ethereum.org/EIPS/eip-4337#reputation-scoring-and-throttlingbanning-for-paymasters) is used. Unlike +in EIP4337, paymasters in our system can interact with any storage slots and are +not throttled under specific conditions, such as time elapsed since the last successful verification or consistent slot access patterns. + +## Built-in Paymaster Flows + +Paymasters can operate automatically or require user interaction, depending on +their design. 
For instance, a paymaster that exchanges ERC20 tokens for ETH would +require users to grant a necessary allowance. + +The account abstraction protocol by itself is generic and allows both accounts and +paymasters to implement arbitrary interactions. However, the code of default +accounts (EOAs) is constant, but we still want them to be able to participate in +the ecosystem of custom accounts and paymasters. That's why we have standardized +the `paymasterInput` field of the transaction to cover the most common uses-cases of the paymaster feature. + +Your accounts are free to implement or not implement the support for these flows. +However, this is highly encouraged to keep the interface the same for both EOAs and custom accounts. + +### General Paymaster Flow + +This flow is used when the paymaster does not require any preliminary actions from the user: + +The `paymasterInput` field must be encoded as a call to a function with the following interface: + +```solidity +function general(bytes calldata data); +``` + +For EOA accounts, this input is typically non-functional, but paymasters can interpret the data as needed. + +### Approval-Based Paymaster Flow + +This flow is essential when a user must set a token allowance for the paymaster. +The `paymasterInput` field must be encoded as a call to a function with the following signature: + +```solidity +function approvalBased( + address _token, + uint256 _minAllowance, + bytes calldata _innerInput +); +``` + +The EOA will ensure that the allowance of the `_token` towards the paymaster is set +to at least `_minAllowance`. The `_innerInput` param is an additional payload that +can be sent to the paymaster to implement any logic (e.g. an additional signature or key that can be validated by the paymaster). + +If you are developing a paymaster, you _should not_ trust the transaction sender to +behave honestly (e.g. provide the required allowance with the `approvalBased` +flow). 
These flows serve mostly as instructions to EOAs and the requirements should always be double-checked by the paymaster. + +## Testnet paymaster + +To ensure users experience paymasters on testnet, as well as keep supporting paying +fees in ERC20 tokens, the Matter Labs team provides the testnet paymaster, that +enables paying fees in ERC20 token at a 1:1 exchange rate with ETH (i.e. one unit of this token is equal to 1 wei of ETH). + +The paymaster supports only the approval based paymaster flow and requires +that the `token` param is equal to the token being +swapped and `minAllowance` to equal to least `tx.maxFeePerGas * tx.gasLimit`. +In addition, the testnet paymaster does not make use of the `_innerInput` parameter, +so nothing should be provided (empty `bytes`). + +## Estimating Gas When Interacting with a Paymaster + +Interacting with a paymaster generally consumes more gas than a standard +transaction due to additional computations and operations. The primary factors +contributing to this increased gas usage are: + +1. **Internal Computations**: These include the operations within the paymaster's `validateAndPayForPaymasterTransaction` and `postTransaction`. +2. **Funds Transfer**: The gas consumed when a paymaster sends funds to the bootloader. +3. **ERC20 Token Allowance Management**: Optionally, if the user compensates the +paymaster with an ERC20 token, managing the token's allowance consumes additional gas. + +- The gas for internal computations is usually minimal, depending on the specific paymaster's implementation. +- The cost of transferring funds is comparable to what users might pay for similar transactions independently. +- Managing ERC20 allowances can significantly impact gas usage, particularly if +it's the first time the user is setting an allowance. This process might require +publishing a 32-byte storage key identifier, potentially using up to 400k gas at a +50 gwei L1 gas price. 
Notably, while the transactional flow often zeroes out the +storage slot at execution's end (hence "grant `X` allowance + paymaster spends all +allowance"), the initial cost is pre-charged during execution. Only if the slot is zeroed at the end of the transaction will the user be refunded. + +### Importance of Accurate Gas Estimation + +Accurate gas estimation is crucial, especially for operations involving extensive +pubdata, like writing to storage. You should include the necessary `paymasterInput` +during estimation to ensure the paymaster's involvement is accurately accounted +for. +The code snippet below, from the [Custom Paymaster Tutorial](https://code.zksync.io/tutorials/erc20-paymaster), +demonstrates how to perform this estimation: + +```ts +const gasLimit = await erc20.estimateGas.mint(wallet.address, 5, { + customData: { + gasPerPubdata: utils.DEFAULT_GAS_PER_PUBDATA_LIMIT, + paymasterParams: paymasterParams, + }, +}); +``` + +Here, `paymasterParams` includes both the address of the paymaster and its input. +However, `paymasterInput` often contains parameters that are difficult to predict +ahead of time, such as the exact amount of tokens required by the user. + +Additionally, paymasters may need to verify pricing data or conversion rates, possibly requiring a server-side signature. + +### Handling Complex Dependencies + +Complex dependencies, such as those involving signatures that depend on transaction content, pose challenges: + +- Returning a `magic = 0` from `validateAndPayForPaymasterTransaction` can simulate +the gas consumption of a valid signature verification. This ensures that, although +the transaction would fail on mainnet due to `magic = 0`, the correct gas amount can still be estimated. +- Gas estimation is essentially a binary search for the lowest gas amount that +prevents transaction failure. If validation consistently fails, so will the gas +estimation, as the system will continuously attempt to increase the gas limit. 
+ +### Strategies for Providing Allowance Estimates + +1. **Rough Estimation**: If you have a general idea of the funds involved, use it +for the estimation. Minor differences won't typically cause transaction failure due +to the buffer already included in our estimates. However, discrepancies can occur +if the user's balance changes unexpectedly between estimation and transaction execution. + +2. **Separate Estimation for Allowance Setting**: Alternatively, estimate the gas +for a transaction where the user sets the allowance separately. Add this estimate +to the original transaction's estimated cost. This approach accounts for nonce +changes and general validation logic but may introduce significant overhead. + +Each method has its pros and cons, and choosing the right approach depends on the specific circumstances of the transaction and the paymaster's requirements. diff --git a/content/00.build/65.developer-reference/40.account-abstraction/60.signature-validation.md b/content/00.build/65.developer-reference/40.account-abstraction/60.signature-validation.md new file mode 100644 index 00000000..e2febaa3 --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/60.signature-validation.md @@ -0,0 +1,81 @@ +--- +title: Signature Validation +description: Recommended approaches to signature validation. +--- + +Your project can start preparing for native AA support. We highly encourage you to +do so, since it will allow you to onboard hundreds of thousands of users (many new +Wallets are smart accounts by default, providing way smoother experience for users). +We expect that in the future even more users will switch to smart wallets. + +One of the most notable differences between various types of accounts to be built +is different signature schemes. We expect accounts to support the [EIP-1271](https://eips.ethereum.org/EIPS/eip-1271) standard. 
+ +The +[`@openzeppelin/contracts/utils/cryptography/SignatureChecker.sol`](https://github.com/OpenZeppelin/openzeppelin-contracts/blob/5ed5a86d1d22f387ce69ab4e0ace405de8bc888d/contracts/utils/cryptography/SignatureChecker.sol#L22) +library provides a way to verify signatures for different +account implementations. We strongly encourage you to use this library whenever you need to check that a signature of an account is correct. + +### Adding the library to your project + +::code-group + +```bash [npm] +npm add @openzeppelin/contracts +``` + +```bash [yarn] +yarn add @openzeppelin/contracts +``` + +```bash [pnpm] +pnpm add @openzeppelin/contracts +``` + +```bash [bun] +bun add @openzeppelin/contracts +``` + +:: + +### Example of using the library + +```solidity +pragma solidity ^0.8.0; + +import { SignatureChecker } from "@openzeppelin/contracts/utils/cryptography/SignatureChecker.sol"; + +contract TestSignatureChecker { + using SignatureChecker for address; + + function isValidSignature( + address _address, + bytes32 _hash, + bytes memory _signature + ) public pure returns (bool) { + return _address.isValidSignatureNow(_hash, _signature); + } +} +``` + +### Verifying AA signatures + +It is also **not recommended** to use `ethers.js` library to verify user signatures. + +Our SDK provides two methods with its `utils` to verify the signature of an account: + +```ts +export async function isMessageSignatureCorrect(address: string, message: ethers.Bytes | string, signature: SignatureLike): Promise<boolean>; + +export async function isTypedDataSignatureCorrect( + address: string, + domain: TypedDataDomain, + types: Record<string, Array<TypedDataField>>, + value: Record<string, any>, + signature: SignatureLike +): Promise<boolean>; +``` + +Currently these methods only support verifying ECDSA signatures, but very soon they will support EIP1271 signature verification as well. 
+ +Both of these methods return `true` or `false` depending on whether the message signature is correct. diff --git a/content/00.build/65.developer-reference/40.account-abstraction/_dir.yml b/content/00.build/65.developer-reference/40.account-abstraction/_dir.yml new file mode 100644 index 00000000..d9010395 --- /dev/null +++ b/content/00.build/65.developer-reference/40.account-abstraction/_dir.yml @@ -0,0 +1 @@ +title: Account Abstraction diff --git a/content/00.build/65.developer-reference/50.era-contracts/10.l1-contracts.md b/content/00.build/65.developer-reference/50.era-contracts/10.l1-contracts.md new file mode 100644 index 00000000..f546d3ad --- /dev/null +++ b/content/00.build/65.developer-reference/50.era-contracts/10.l1-contracts.md @@ -0,0 +1,151 @@ +--- +title: L1 Contracts +description: +--- + +## Useful Addresses + +::content-switcher +--- +items: [{ + label: 'Mainnet', + partial: '_contract_addresses/_mainnet' +}, { + label: 'Sepolia', + partial: '_contract_addresses/_testnet' +}] +--- +:: + +## DiamondInit + +It is a one-function contract that implements the logic of initializing a diamond proxy. +It is called only once on the diamond constructor and is not saved in the diamond as a facet. + +Implementation detail - function returns a magic value just like it is designed in +[EIP-1271](https://eips.ethereum.org/EIPS/eip-1271), but the magic value is 32 bytes in size. + +## DiamondProxy + +This contract uses the [EIP-2535](https://eips.ethereum.org/EIPS/eip-2535) diamond +proxy pattern. + +It is an in-house implementation that is inspired by the [mudgen reference implementation](https://github.com/mudgen/Diamond). +It has no external functions, only the fallback that delegates a call to one of the facets (target/implementation contract). + +So even an upgrade system is a separate facet that can be replaced. + +One of the differences from the reference implementation is the ability to freeze access to the facet. 
+
+Each of the facets has an associated parameter that indicates if it is possible to freeze access to the facet.
+
+Privileged actors can freeze the **diamond** (not a specific facet!) and all facets
+with the marker `isFreezable` should be inaccessible until the governor unfreezes the diamond.
+
+## Diamond
+
+Technically, this L1 smart contract acts as a connector between Ethereum (L1) and zkSync (L2).
+This contract checks the validity proof and data availability, handles
+L2 <-> L1 communication, finalizes L2 state transition, and more.
+
+There are also important contracts deployed on the L2 that can also execute logic that we refer to as [System Contracts](system-contracts).
+Using L2 <-> L1 communication can affect both the L1 and the L2.
+
+## ExecutorFacet
+
+A contract that accepts L2 blocks, enforces data availability and checks the validity of zk-proofs.
+
+The state transition is divided into three stages:
+
+- `commitBlocks` - check L2 block timestamp, process the L2 logs, save data for a block, and prepare data for zk-proof.
+- `proveBlocks` - validate zk-proof.
+- `executeBlocks` - finalize the state, marking L1 -> L2 communication processing, and saving Merkle tree with L2 logs.
+
+When a block is committed, we process L2 -> L1 logs. Here are the invariants that are expected there:
+
+- Exactly one L2 -> L1 log from the `L2_SYSTEM_CONTEXT_ADDRESS`, with the `key == l2BlockTimestamp` and `value == l2BlockHash`.
+- Several (or none) logs from the `L2_KNOWN_CODE_STORAGE_ADDRESS` with the `key == bytecodeHash`, where bytecode is marked as a known factory dependency.
+- Several (or none) logs from the `L2_BOOTLOADER_ADDRESS` with the `key == canonicalTxHash` where `canonicalTxHash` is a hash of processed L1 -> L2 transaction.
+- Several (or none) logs from the `L2_TO_L1_MESSENGER` with the `key == hashedMessage` where `hashedMessage` is a hash of an arbitrary-length message
+that is sent from L2.
+- Several (or none) logs from other addresses with arbitrary parameters. + +## GettersFacet + +Separate facet, whose only function is providing `view` and `pure` methods. It also +implements [diamond loupe](https://eips.ethereum.org/EIPS/eip-2535#diamond-loupe) which makes managing facets easier. + +## MailboxFacet + +The facet that handles L2 <-> L1 communication, an overview for which can be found +in the [L1 / L2 Interoperability guide](/build/developer-reference/l1-l2-interoperability). + +The Mailbox only cares about transferring information from L2 to L1 and the other way but does not hold or transfer any assets (ETH, ERC20 tokens, or NFTs). + +L1 -> L2 communication is implemented as requesting an L2 transaction on L1 and +executing it on L2. This means a user can call the function on the L1 contract to +save the data about the transaction in some queue. Later on, a validator can +process such transactions on L2 and mark them as processed on the L1 priority queue. + +Currently, it is used only for sending information from L1 to L2 or implementing a +multi-layer protocol, but it is planned to use a priority queue for the +censor-resistance mechanism. Relevant functions for L1 -> L2 communication: `requestL2Transaction`/`l2TransactionBaseCost`/`serializeL2Transaction`. + +**NOTE**: For each executed transaction L1 -> L2, the system program necessarily sends an L2 -> L1 log. + +The semantics of such L2 -> L1 log are always: + +- sender = BOOTLOADER_ADDRESS. +- key = hash(L1ToL2Transaction). +- value = status of the processing transaction (1 - success & 0 for fail). +- isService = true (just a conventional value). +- l2ShardId = 0 (means that L1 -> L2 transaction was processed in a rollup shard, other shards are not available yet + anyway). +- txNumberInBlock = number of transactions in the block. + +L2 -> L1 communication, in contrast to L1 -> L2 communication, is based only on transferring the information, and not on the transaction execution on L1. 
+ +From the L2 side, there is a special zkEVM opcode that saves `l2ToL1Log` in the L2 +block. A validator will send all `l2ToL1Logs` when sending an L2 block to the L1 +(see `ExecutorFacet`). Later on, users will be able to both read their `l2ToL1logs` on L1 and _prove_ that they sent it. + +From the L1 side, for each L2 block, a Merkle root with such logs in leaves is calculated. Thus, a user can provide Merkle proof for each `l2ToL1Logs`. + +_NOTE_: The `l2ToL1Log` structure consists of fixed-size fields! Because of this, +it is inconvenient to send a lot of data from L2 and to prove that they were sent +on L1 using only `l2ToL1log`. To send a variable-length message we use this trick: + +- One of the system contracts accepts an arbitrary-length message and sends a +fixed-length message with parameters `senderAddress == this`, `marker == true`, `key == msg.sender`, `value == keccak256(message)`. +- The contract on L1 accepts all sent messages and if the message came from this system contract it requires that the + preimage of `value` be provided. + +## ValidatorTimelock + +An intermediate smart contract between the validator EOA account and the zkSync smart contract. Its primary purpose is +to provide a trustless means of delaying batch execution without modifying the main zkSync contract. zkSync actively +monitors the chain activity and reacts to any suspicious activity by freezing the chain. This allows time for +investigation and mitigation before resuming normal operations. + +It is a temporary solution to prevent any significant impact of the validator hot key leakage, while the network is in +the Alpha stage. + +This contract consists of four main functions `commitBatches`, `proveBatches`, `executeBatches`, and `revertBatches`, +which can be called only by the validator. 
+ +When the validator calls `commitBatches`, the same calldata will be propagated to the zkSync contract (`DiamondProxy` +through `call` where it invokes the `ExecutorFacet` through `delegatecall`), and also a timestamp is assigned to these +batches to track the time these batches are committed by the validator to enforce a delay between committing and +execution of batches. Then, the validator can prove the already committed batches regardless of the mentioned timestamp, +and again the same calldata (related to the `proveBatches` function) will be propagated to the zkSync contract. After +the `delay` is elapsed, the validator is allowed to call `executeBatches` to propagate the same calldata to zkSync +contract. + +The owner of the ValidatorTimelock contract is the same as the owner of the Governance contract - Matter Labs multisig. + +## Allowlist + +The auxiliary contract controls the permission access list. It is used in bridges and diamond proxies to control which +addresses can interact with them in the Alpha release. Currently, it is supposed to set all permissions to public. + +The owner of the Allowlist contract is the Governance contract. diff --git a/content/00.build/65.developer-reference/50.era-contracts/20.system-contracts.md b/content/00.build/65.developer-reference/50.era-contracts/20.system-contracts.md new file mode 100644 index 00000000..94ff803a --- /dev/null +++ b/content/00.build/65.developer-reference/50.era-contracts/20.system-contracts.md @@ -0,0 +1,318 @@ +--- +title: System Contracts +description: +--- + +While most of the primitive EVM opcodes can be supported out of the box (i.e. zero-value calls, +addition/multiplication/memory/storage management, etc), some of the opcodes are not supported by the VM by default and +they are implemented via “system contracts” — these contracts are located in a special _kernel space,_ i.e. 
in the
+address space in range `[0..2^16-1]`, and they have some special privileges, which users’ contracts don’t have. These
+contracts are pre-deployed at the genesis and updating their code can be done only via system upgrade, managed from L1.
+
+The use of each system contract will be explained down below.
+
+Most of the details on the implementation and the requirements for the execution of system contracts can be found in the
+doc-comments of their respective code bases. This chapter serves only as a high-level overview of such contracts.
+
+All the codes of system contracts (including `DefaultAccount`s) are part of the protocol and can only be changed via a
+system upgrade through L1.
+
+The addresses and the interfaces of the L2 system contracts can be found [here](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/Constants.sol).
+
+## SystemContext
+
+This contract is used to support various system parameters not included in the VM by default, i.e. `chainId`, `origin`,
+`ergsPrice`, `blockErgsLimit`, `coinbase`, `difficulty`, `baseFee`, `blockhash`, `block.number`, `block.timestamp`.
+
+It is important to note that the constructor is **not** run for system contracts upon genesis, i.e. the constant context
+values are set on genesis explicitly. Notably, if in the future we want to upgrade the contracts, we will do it via
+`ContractDeployer` and so the constructor will be run.
+
+This contract is also responsible for ensuring validity and consistency of batches, L2 blocks and virtual blocks. The
+implementation itself is rather straightforward, but to better understand this contract, please take a look at the
+[page](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20&%20L2%20blocks%20on%20zkSync.md)
+about the block processing on zkSync.
+
+## AccountCodeStorage
+
+The code hashes of accounts are stored inside the storage of this contract.
Whenever a VM calls a contract with address +`address` it retrieves the value under storage slot `address` of this system contract, if this value is non-zero, it +uses this as the code hash of the account. + +Whenever a contract is called, the VM asks the operator to provide the preimage for the codehash of the account. That is +why data availability of the code hashes is paramount. + +### Constructing vs Non-Constructing Code Hash + +In order to prevent contracts from being able to call a contract during its construction, we set the marker (i.e. second +byte of the bytecode hash of the account) as `1`. This way, the VM will ensure that whenever a contract is called +without the `isConstructor` flag, the bytecode of the default account (i.e. EOA) will be substituted instead of the +original bytecode. + +## BootloaderUtilities + +This contract contains some of the methods which are needed purely for the bootloader functionality but were moved out +from the bootloader itself for the convenience of not writing this logic in Yul. + +## DefaultAccount + +Whenever a contract that does **not** both: + +- belong to kernel space +- have any code deployed on it (the value stored under the corresponding storage slot in `AccountCodeStorage` is zero) + +The code of the default account is used. The main purpose of this contract is to provide EOA-like experience for both +wallet users and contracts that call it, i.e. it should not be distinguishable (apart of spent gas) from EOA accounts on +Ethereum. + +## Ecrecover + +The implementation of the ecrecover precompile. It is expected to be used frequently, so written in pure yul with a +custom memory layout. + +The contract accepts the calldata in the same format as EVM precompile, i.e. the first 32 bytes are the hash, the next +32 bytes are the v, the next 32 bytes are the r, and the last 32 bytes are the s. 
+ +It also validates the input by the same rules as the EVM precompile: + +- The v should be either 27 or 28, +- The r and s should be less than the curve order. + +After that, it makes a precompile call and returns empty bytes if the call failed, and the recovered address otherwise. + +## Empty contracts + +Some of the contracts are relied upon to have EOA-like behaviour, i.e. they can be always called and get the success +value in return. An example of such address is 0 address. We also require the bootloader to be callable so that the +users could transfer ETH to it. + +For these contracts, we insert the `EmptyContract` code upon genesis. It is basically a noop code, which does nothing +and returns `success=1`. + +## SHA256 & Keccak256 + +Note that, unlike Ethereum, keccak256 is a precompile (_not an opcode_) on zkSync. + +These system contracts act as wrappers for their respective crypto precompile implementations. They are expected to be +used frequently, especially keccak256, since Solidity computes storage slots for mapping and dynamic arrays with its +help. That's why we wrote contracts on pure yul with optimizing the short input case. + +The system contracts accept the input and transform it into the format that the zk-circuit expects. This way, some of +the work is shifted from the crypto to smart contracts, which are easier to audit and maintain. + +Both contracts should apply padding to the input according to their respective specifications, and then make a +precompile call with the padded data. All other hashing work will be done in the zk-circuit. It's important to note that +the crypto part of the precompiles expects to work with padded data. This means that a bug in applying padding may lead +to an unprovable transaction. + +## L2EthToken & MsgValueSimulator + +Unlike Ethereum, zkEVM does not have any notion of any special native token. That’s why we have to simulate operations +with Ether via two contracts: `L2EthToken` & `MsgValueSimulator`. 
+
+`L2EthToken` is a contract that holds the balances of ETH for the users. This contract does NOT provide ERC20 interface.
+The only method for transferring Ether is `transferFromTo`. It permits only some system contracts to transfer on behalf
+of users. This is needed to ensure that the interface is as close to Ethereum as possible, i.e. the only way to transfer
+ETH is by doing a call to a contract with some `msg.value`. This is what `MsgValueSimulator` system contract is for.
+
+Whenever anyone wants to do a non-zero value call, they need to call `MsgValueSimulator` with:
+
+- The calldata for the call equal to the original one.
+- Pass `value` and whether the call should be marked with `isSystem` in the first extra abi params.
+- Pass the address of the callee in the second extraAbiParam.
+
+## KnownCodeStorage
+
+This contract is used to store whether a certain code hash is “known”, i.e. can be used to deploy contracts. On zkSync,
+the L2 stores the contract’s code _hashes_ and not the codes themselves. Therefore, it must be part of the protocol to
+ensure that no contract with unknown bytecode (i.e. hash with an unknown preimage) is ever deployed.
+
+The factory dependencies field provided by the user for each transaction contains the list of the contract’s bytecode
+hashes to be marked as known. We can not simply trust the operator to “know” these bytecodehashes as the operator might
+be malicious and hide the preimage. We ensure the availability of the bytecode in the following way:
+
+- If the transaction comes from L1, i.e. all its factory dependencies have already been published on L1, we can simply
+  mark these dependencies as “known”.
+- If the transaction comes from L2, i.e. (the factory dependencies are yet to be published on L1), we make the user pay by
+  burning ergs proportional to the bytecode’s length. After that, we send the L2→L1 log with the bytecode hash of the
+  contract.
It is the responsibility of the L1 contracts to verify that the corresponding bytecode hash has been + published on L1. + +It is the responsibility of the `ContractDeployer` system contract to deploy only +those code hashes that are known. + +The KnownCodesStorage contract is also responsible for ensuring that all the “known” bytecode hashes are also valid. + +## ContractDeployer & ImmutableSimulator + +`ContractDeployer` is a system contract responsible for deploying contracts on zkSync. It is better to understand how it +works in the context of how the contract deployment works on zkSync. Unlike Ethereum, where `create`/`create2` are +opcodes, on zkSync these are implemented by the compiler via calls to the ContractDeployer system contract. + +For additional security, we also distinguish the deployment of normal contracts and accounts. That’s why the main +methods that will be used by the user are `create`, `create2`, `createAccount`, `create2Account`, which simulate the +CREATE-like and CREATE2-like behavior for deploying normal and account contracts respectively. + +- ContractDeployer [Interface](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/IContractDeployer.sol) + +### **Address derivation** + +Each rollup that supports L1→L2 communications needs to make sure that the addresses of contracts on L1 and L2 do not +overlap during such communication (otherwise it would be possible that some evil proxy on L1 could mutate the state of +the L2 contract). Generally, rollups solve this issue in two ways: + +- XOR/ADD some kind of constant to addresses during L1→L2 communication. That’s how rollups closer to full + EVM-equivalence solve it, since it allows them to maintain the same derivation rules on L1 at the expense of contract + accounts on L1 having to redeploy on L2. +- Have different derivation rules from Ethereum. 
That is the path that zkSync has chosen, mainly because, since we have
+  different bytecode than on EVM, CREATE2 address derivation would be different in practice anyway.
+
+You can see the rules for our address derivation in `getNewAddressCreate2`/ `getNewAddressCreate` methods in the
+ContractDeployer.
+
+Note, that we still add a certain constant to the addresses during L1→L2 communication in order to allow ourselves some
+way to support EVM bytecodes in the future.
+
+### **Deployment nonce**
+
+On Ethereum, the same nonce is used for CREATE for accounts and EOA wallets. On zkSync this is not the case, we use a
+separate nonce called “deploymentNonce” to track the nonces for accounts. This was done mostly for consistency with
+custom accounts and for having a multicalls feature in the future.
+
+### **General process of deployment**
+
+- After incrementing the deployment nonce, the contract deployer must ensure that the bytecode that is being deployed is
+  available.
+- After that, it puts the bytecode hash with a
+  [special constructing marker](#constructing-vs-non-constructing-code-hash) as code for the address of the
+  to-be-deployed contract.
+- Then, if there is any value passed with the call, the contract deployer passes it to the deployed account and sets the
+  `msg.value` for the next call equal to this value.
+- Then, it uses `mimic_call` for calling the constructor of the contract out of the name of the account.
+- It parses the array of immutables returned by the constructor (we’ll talk about immutables in more detail later).
+- Calls `ImmutableSimulator` to set the immutables that are to be used for the deployed contract.
+
+Note how it is different from the EVM approach: on EVM when the contract is deployed, it executes the initCode and
+returns the deployedCode. On zkSync, contracts only have the deployed code and can set immutables as storage variables
+returned by the constructor.
+
+### **Constructor**
+
+On Ethereum, the constructor is only part of the initCode that gets executed during the deployment of the contract and
+returns the deployment code of the contract. On zkSync, there is no separation between deployed code and constructor
+code. The constructor is always a part of the deployment code of the contract. In order to protect it from being called,
+the compiler-generated contracts invoke the constructor only if the `isConstructor` flag is provided (it is only available for
+the system contracts).
+
+After execution, the constructor must return an array of:
+
+```solidity
+struct ImmutableData {
+  uint256 index;
+  bytes32 value;
+}
+
+```
+
+basically denoting an array of immutables passed to the contract.
+
+### **Immutables**
+
+Immutables are stored in the `ImmutableSimulator` system contract. The way the `index` of each immutable is defined is
+part of the compiler specification. This contract treats it simply as mapping from index to value for each particular
+address.
+
+Whenever a contract needs to access a value of some immutable, it calls the
+`ImmutableSimulator.getImmutable(getCodeAddress(), index)`. Note that on zkSync it is possible to get the current
+execution address.
+
+### **Return value of the deployment methods**
+
+If the call succeeded, the address of the deployed contract is returned. If the deploy fails, the error bubbles up.
+
+## DefaultAccount
+
+The implementation of the default account abstraction. This is the code that is used by default for all addresses that
+are not in kernel space and have no contract deployed on them. This address:
+
+- Contains minimal implementation of our account abstraction protocol. Note that it supports the
+  [built-in paymaster flows](/build/developer-reference/account-abstraction/paymasters).
+- When anyone (except bootloader) calls it, it behaves in the same way as a call to an EOA, i.e.
it always returns
+  `success = 1, returndatasize = 0` for calls from anyone except for the bootloader.
+
+## L1Messenger
+
+A contract used for sending arbitrary length L2→L1 messages from zkSync to L1. While zkSync natively supports a rather
+limited number of L1→L2 logs, which can transfer only roughly 64 bytes of data at a time, we allowed sending
+nearly-arbitrary length L2→L1 messages with the following trick:
+
+The L1 messenger receives a message, hashes it and sends only its hash as well as the original sender via L2→L1 log.
+Then, it is the duty of the L1 smart contracts to make sure that the operator has provided full preimage of this hash in
+the commitment of the batch.
+
+The `L1Messenger` is also responsible for validating the total pubdata to be sent on L1. You can read more about it
+[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md).
+
+- L1Messenger [Interface](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/IL1Messenger.sol)
+
+## NonceHolder
+
+Serves as storage for nonces for our accounts. Besides making it easier for the operator to order transactions (i.e. by
+reading the current nonces of account), it also serves a separate purpose: making sure that the pair (address, nonce) is
+always unique.
+
+It provides a function `validateNonceUsage` which the bootloader uses to check whether the nonce has been used for a
+certain account or not. The bootloader enforces that the nonce is marked as non-used before the validation step of the
+transaction and marked as used afterwards. The contract ensures that once marked as used, the nonce can not be set
+back to the “unused” state.
+
+Note that nonces do not necessarily have to be monotonic (this is needed to support more interesting applications of
+account abstractions, e.g. protocols that can start transactions on their own, tornado-cash like protocols, etc).
That’s
+why there are two ways to set a certain nonce as “used”:
+
+- By incrementing the `minNonce` for the account (thus marking all nonces that are lower than `minNonce` as used).
+- By setting some non-zero value under the nonce via `setValueUnderNonce`. This way, this key will be marked as used and
+  will no longer be allowed to be used as nonce for accounts. This way it is also rather efficient, since these 32 bytes
+  could be used to store some valuable information.
+
+The accounts upon creation can also provide which type of nonce ordering they want: Sequential (i.e. it should be
+expected that the nonces grow one by one, just like an EOA) or Arbitrary, the nonces may have any values. This ordering is
+not enforced in any way by system contracts, but it is more of a suggestion to the operator on how it should order the
+transactions in the mempool.
+
+- NonceHolder [Interface](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/INonceHolder.sol)
+
+## EventWriter
+
+A system contract responsible for emitting events.
+
+It accepts in its 0-th extra abi data param the number of topics. In the rest of the extraAbiParams it accepts topics
+for the event to emit. Note that, in reality, the first topic of the event contains the address of the account.
+Generally, the users should not interact with this contract directly, but only through Solidity syntax of `emit`-ing new
+events.
+
+## Compressor
+
+One of the most expensive resources for a rollup is data availability, so in order to reduce costs for the users we
+compress the published pubdata in several ways:
+
+- We compress published bytecodes.
+- We compress state diffs.
+
+This contract contains utility methods that are used to verify the correctness of either bytecode or state diff
+compression.
You can read more on how we compress state diffs and bytecodes in the corresponding +[document](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md). + +#### Protected access to some of the system contracts + +Some of the system contracts have an impact on the account that may not be expected +on Ethereum. For instance, on Ethereum the only way an EOA could increase its nonce +is by sending a transaction. Also, sending a transaction could only increase nonce +by 1 at a time. On zkSync nonces are implemented via the [NonceHolder](#nonceholder) system contract and, if naively implemented, the users could be +allowed to increment their nonces by calling this contract. That's why the calls to +most of the non-view methods of the nonce holder were restricted to be called only +with a special `isSystem` flag, so that interactions with important system contracts could be consciously managed by the developer of the account. + +The same applies to the `ContractDeployer` system contract. This +means that, for instance, you would need to explicitly allow your users to deploy +contracts, as it is done in the DefaultAccount's [implementation](https://github.com/matter-labs/era-contracts/blob/6250292a98179cd442516f130540d6f862c06a16/system-contracts/contracts/DefaultAccount.sol#L125). 
diff --git a/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_mainnet.md b/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_mainnet.md new file mode 100644 index 00000000..a330a446 --- /dev/null +++ b/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_mainnet.md @@ -0,0 +1,15 @@ +--- +title: Mainnet contract addresses +--- + +**L1 Mainnet Contract Addresses:** + +- **DiamondInit:** [0xb91d905A698c28b73C61aF60C63919b754FCF4DE](https://etherscan.io/address/0xb91d905A698c28b73C61aF60C63919b754FCF4DE#code) +- **DiamondProxy:** [0x32400084c286cf3e17e7b677ea9583e60a000324](https://etherscan.io/address/0x32400084c286cf3e17e7b677ea9583e60a000324#code) +- **DiamondUpgrade:** [0xe79a6d29bB0520648F25D11d65e29FB06B195F0F](https://etherscan.io/address/0xe79a6d29bB0520648F25D11d65e29FB06B195F0F#code) +- **ExecutorFacet:** [0xD059478a564dF1353A54AC0D0e7Fc55A90b92246](https://etherscan.io/address/0xD059478a564dF1353A54AC0D0e7Fc55A90b92246#code) +- **GettersFacet:** [0xF3ACF6a03ea4a914B78Ec788624B25ceC37c14A4](https://etherscan.io/address/0xF3ACF6a03ea4a914B78Ec788624B25ceC37c14A4#code) +- **Verifier:** [0xB465882F67d236DcC0D090F78ebb0d838e9719D8](https://etherscan.io/address/0xB465882F67d236DcC0D090F78ebb0d838e9719D8#code) +- **MailboxFacet:** [0x63b5EC36B09384fFA7106A80Ec7cfdFCa521fD08](https://etherscan.io/address/0x63b5EC36B09384fFA7106A80Ec7cfdFCa521fD08#code) +- **ValidatorTimelock:** [0xa8cb082a5a689e0d594d7da1e2d72a3d63adc1bd](https://etherscan.io/address/0xa8cb082a5a689e0d594d7da1e2d72a3d63adc1bd#code) +- **AllowList:** [0x0C0dC1171258694635AA50cec5845aC1031cA6d7](https://etherscan.io/address/0x0C0dC1171258694635AA50cec5845aC1031cA6d7#code) diff --git a/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_testnet.md b/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_testnet.md new file mode 100644 index 00000000..17738d1d --- /dev/null 
+++ b/content/00.build/65.developer-reference/50.era-contracts/_contract_addresses/_testnet.md @@ -0,0 +1,15 @@ +--- +title: Testnet contract addresses +--- + +**L1 Testnet Contract Addresses:** + +- **DiamondInit:** [0x457701fDC6CaBc7D2EfB9b85f7faB0EE4bBD3c36](https://sepolia.etherscan.io/address/0x457701fDC6CaBc7D2EfB9b85f7faB0EE4bBD3c36#code) +- **DiamondProxy:** [0x9a6de0f62Aa270A8bCB1e2610078650D539B1Ef9](https://sepolia.etherscan.io/address/0x9a6de0f62Aa270A8bCB1e2610078650D539B1Ef9#code) +- **DiamondUpgrade:** [0xA6b2731c08385782fBaCfCcD63D3c7fc7b798E47](https://sepolia.etherscan.io/address/0xA6b2731c08385782fBaCfCcD63D3c7fc7b798E47#code) +- **ExecutorFacet:** [0xe6cc1455217a8BBCF2c663607A0b8c200B8732F1](https://sepolia.etherscan.io/address/0xe6cc1455217a8BBCF2c663607A0b8c200B8732F1#code) +- **GettersFacet:** [0x10f328c20dD2469b7e88f374B9794471599c1c8D](https://sepolia.etherscan.io/address/0x10f328c20dD2469b7e88f374B9794471599c1c8D#code) +- **Verifier:** [0xf07ea72e071bc21612449570C365Ff3DC9176Ecb](https://sepolia.etherscan.io/address/0xf07ea72e071bc21612449570C365Ff3DC9176Ecb#code) +- **MailboxFacet:** [0x2ed8eF54a16bBF721a318bd5a5C0F39Be70eaa65](https://sepolia.etherscan.io/address/0x2ed8eF54a16bBF721a318bd5a5C0F39Be70eaa65#code) +- **ValidatorTimelock:** [0x8CaC0a609A314E4161b8070cdEe065060B2486A1](https://sepolia.etherscan.io/address/0x8CaC0a609A314E4161b8070cdEe065060B2486A1#code) +- **AllowList:** [0x7546a21cd4D74fc98Ef1A50145dfd8c043e2096F](https://sepolia.etherscan.io/address/0x7546a21cd4D74fc98Ef1A50145dfd8c043e2096F#code) diff --git a/content/00.build/65.developer-reference/60.bridging-assets.md b/content/00.build/65.developer-reference/60.bridging-assets.md new file mode 100644 index 00000000..81c40d0b --- /dev/null +++ b/content/00.build/65.developer-reference/60.bridging-assets.md @@ -0,0 +1,79 @@ +--- +title: Bridging Assets +description: +--- + +Users can deposit and withdraw assets from zkSync Era using any of the [multiple 
bridges](https://zksync.io/explore#bridges). + +Under the hood, bridging is implemented by having two contracts +(one deployed to L1, and the second deployed to L2) +communicating with each other using [L1 <-> L2 interoperability](l1-l2-interoperability). + +Developers are free to build their own [custom bridge for any token](#custom-bridges-on-l1-and-l2); however, we provide default bridges +(one for ETH and one for ERC20 tokens), which can be used for basic bridging. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} + +Addresses of tokens on L2 will always differ from the same token's L1 address. Also +note that tokens bridged via the default bridge only support standard ERC20 functionality, i.e. rebase tokens and other custom behavior are not supported. + +:: + +## Default bridges + +You can get the default bridge addresses using the [`zks_getBridgeContracts`](/build/api-reference/zks-rpc#zks_getbridgecontracts) endpoint or +`getDefaultBridgeAddresses` method of `Provider`. Similar methods are available in the other SDKs. + +### Deposits (to L2) + +Users must call the `deposit` method on the L1 bridge contract, which triggers the following actions: + +- The user's L1 tokens will be sent to the L1 bridge and become locked there. +- The L1 bridge initiates a transaction to the L2 bridge using L1 -> L2 communication. +- Within the L2 transaction, tokens will be minted and sent to the specified address on L2. + - If the token does not exist on zkSync yet, a new contract is deployed for it. + Given the L2 token address is deterministic (based on the original L1 address, + name and symbol), it doesn't matter who is the first person bridging it, the new L2 address will be the same. +- For every executed L1 -> L2 transaction, there will be an L2 -> L1 log message confirming its execution. +- Lastly, the `finalizeDeposit` method is called and it finalizes the deposit and mints funds on L2. 
+ +You can find example scripts to deposit ETH and ERC20 tokens using the default bridges in the how-to section of the docs. + +### Withdrawals (to L1) + +::callout{icon="i-heroicons-light-bulb"} + +- To provide additional security during the Alpha phase, **withdrawals in zkSync Era take 24 hours**. + +- For more information, read the [withdrawal delay guide](/build/resources/withdrawal-delay). + +:: + +Users must call the `withdraw` method on the L2 bridge contract, which will trigger the following actions: + +- L2 tokens will be burned. +- An L2 -> L1 message with the information about the withdrawal will be sent. +- After that, the withdrawal action will be available to be finalized by anyone in +the L1 bridge (by proving the inclusion of the L2 -> L1 message, which is done when calling the `finalizeWithdrawal` method on the L1 bridge contract). +- After the method is called, the funds are unlocked from the L1 bridge and sent to the withdrawal recipient. + +On the testnet environment, we automatically finalize all withdrawals, i.e., for +every withdrawal, we will take care of it by making an L1 transaction that proves the inclusion for each message. + +## Custom bridges on L1 and L2 + +To build a custom bridge, create a regular Solidity contract which extends one of +the interfaces mentioned below for the layer. The interfaces provide access to the zkSync Era SDK deposit and withdraw implementations. + +- L1: [IL1Bridge.sol](https://github.com/matter-labs/era-contracts/blob/main/l1-contracts/contracts/bridge/interfaces/IL1Bridge.sol) + + For more information, check out our example [L1 custom bridge implementation](https://github.com/matter-labs/era-contracts/blob/main/l1-contracts/contracts/bridge/L1ERC20Bridge.sol). 
+ +- L2: [IL2Bridge.sol](https://github.com/matter-labs/era-contracts/blob/main/l1-contracts/contracts/bridge/interfaces/IL2Bridge.sol) + + For more information, check out our example [L2 custom bridge implementation](https://github.com/matter-labs/era-contracts/blob/main/l2-contracts/contracts/bridge/L2ERC20Bridge.sol). + +## Adding Tokens to the Bridge UI + +No action is required to add tokens to the bridge UI. All tokens are automatically +recognized based on user balances. If you desire for your token to display an icon or price, refer to the Token Listing Guide. diff --git a/content/00.build/65.developer-reference/70.fee-model.md b/content/00.build/65.developer-reference/70.fee-model.md new file mode 100644 index 00000000..d256229c --- /dev/null +++ b/content/00.build/65.developer-reference/70.fee-model.md @@ -0,0 +1,137 @@ +--- +title: Fee Model +description: Overview of zkSync Era's fee model. +--- + +zkSync Era's fee model is similar to Ethereum’s where `gas` is charged for +computational cost, cost of publishing data on-chain and storage effects. However, +zkSync Era includes additional costs for publishing to L1 and for proof generation. + +Because the L1 gas price for publishing data (on L1) is so volatile, the amount of required L2 `gas` is variable. +Therefore, for each block, the zkSync Era sequencer defines the following dynamic parameters: + +- `gasPrice`: the price, in gwei, of a unit of gas. +- `gasPerPubdata`: the amount of `gas` for publishing one byte of data on Ethereum. + +In zkSync Era, unlike in Ethereum where each opcode has a fixed gas price, storage +write charges remain dynamic due to the fluctuation of gas price on L1. Other +opcode prices are constant, similar to Ethereum. See the [zkSync opcode documentation](https://github.com/matter-labs/era-zkevm_opcode_defs/blob/9307543b9ca51bd80d4f5c85d6eb80efd8b19bb2/src/lib.rs#L227) +for an idea of how we calculate them. + +Like Ethereum, the most costly operation is a storage update. 
Execution of +arithmetic operations is relatively cheap, as it involves computation alone and no storage changes. + +## State diffs vs transaction inputs + +A considerable advantage we have over optimistic and most ZK rollups is that, +instead of publishing all transaction data to L1, zkSync Era only publishes state diffs, thus publishing significantly less data to L1. + +#### State diff example + +If an oracle updates a price in a contract using the same storage slot 10 times in +the same rollup batch, only the final update is published on Ethereum and is therefore only charged once, making 9 of the 10 updates free. + +Another advantage is the cost-effective contract redeployment. An example is a DEX +with a `PairFactory` contract for different `Pair` pools. The contract bytecode of +`Pair` is only published when the first instance is deployed. After that, subsequent deployments only involve updating one storage slot which sets the +contract code hash on the newly deployed `Pair` address. + +## Design recommendations + +- **Update storage slots as little as possible:** Check to see if your code can avoid unnecessary storage updates. +- **Reuse as many storage slots as possible:** Only the final state diff is published on Ethereum. +- **Reuse the contract code where possible:** + - On Ethereum, avoiding constructor parameters and putting them into constants reduces some of the gas costs upon contract deployment. + - On zkSync Era the opposite is true: as contract bytecode is only published + once, updating the constructor parameters alone leads to substantial fee savings. + +## Gas estimation for transactions + +Ethereum has a constant of `21000` gas that covers the intrinsic costs of +processing a transaction, i.e. checking the signature and updating the nonce for the account. + +On zkSync Era this varies because we support custom and paymaster accounts. These +accounts require a (usually) higher amount of gas than EOAs. 
zkSync Era provides +functions for estimating the cost of a transaction regardless of the type of account. + +The transaction fee estimate depends on the entire transaction flow, including +validation and execution. The `eth_estimateGas` function uses binary search to find the smallest gas value under which the transaction succeeds. + +For any Rust developers interested in the zkSync Era implementation for gas estimation, see the [Rust code in our repo](https://github.com/matter-labs/zksync-era/blob/48fe6e27110c1fe1a438c5375fb256890e8017b1/sdk/zksync-rs/src/operations/execute_contract.rs#L129). + +### Transaction length + +zkSync Era publishes state diffs on-chain. The cost of the transaction, however, +may still depend on transaction length because the sequencer stores long transactions in-memory. + +Long transactions incur additional costs during interactions with an account. +zkSync Era works with different types of accounts and, therefore, the protocol +cannot make assumptions about signature length. Furthermore, given that a signature +(and thus its length) is unavailable at the time of fee estimation, we cannot +precisely estimate the cost of such a transaction. To mitigate this, we multiply +the recommended cost of the transaction by a small percentage. + +### `DefaultAccount` + +By default, the zkSync Era sequencer provides a transaction structure with the +available information during the fee estimation. + +Because the signature is unavailable prior to the transaction taking place, an +invalid 65-byte ECDSA signature is used instead. The `DefaultAccount` (used by +EOAs), during gas fee estimation, executes many operations, including signature +verification, and returns only `bytes4(0)` instead of magic. + +In the case of a custom account with multiple signers, the account may wish to +simulate signature validation for all the provided signers. 
+ +See the [DefaultAccount code](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/DefaultAccount.sol) for more information. + +### Account abstraction considerations + +The `validateTransaction` function for account abstraction, and the +`validateAndPayForPaymasterTransaction` function for paymasters, always attempt to +run using the same amount of computation, including storage access, regardless of whether the transaction is successful or not. + +See the documentation on [account abstraction](/build/developer-reference/account-abstraction) for more detailed information. + +#### `validateTransaction` + +- `validateTransaction` is considered successful when it does not revert (i.e. it returns `success = true`) and also returns the magic string. +- For invalid signatures, the function does not revert. It instead returns invalid magic so the function is unsuccessful. + +#### `eth_estimateGas` + +Because the entire transaction validation and execution flow is simulated in order +to get the transaction’s fee, the user needs to have sufficient funds in their +account, otherwise the simulation may exit. This means that, to ensure the execution progresses, the zkSync Era sequencer adds the necessary balance, +temporarily, to the user’s account; specifically the sequencer increases the account balance by tx.maxFeePerGas \* tx.gasLimit. + +This ensures the `DefaultAccount`’s `payForTransaction` function runs successfully. + +This is different to the Geth implementation which uses `tx.gasprice = 0` to make +sure that the user can pay the fee even though the `tx.origin` in the simulation may not have any balance at all. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +Due to this, custom accounts may unexpectedly contain more balance than they have on-chain during the validation step, which may affect their behavior. +:: + +## Refunds + +A gas estimate may be higher than the actual cost of the transaction. 
This means users usually only spend a portion of the estimated transaction cost. + +The refund, therefore, returns the unpaid transaction fee portion to the user. + +- Only one transaction is recorded on the block, even if a portion of the original estimate is refunded. +- Users can compare their token balance against the transaction cost on the block explorer to verify they did not overspend. +- Users may see no notification in their wallet depending on which wallet they use. + +Refunds are calculated by defining a fair value for the amount the user spent on the transaction and subtracting it from the actual spend. + +## Out-of-gas errors + +Unlike on Geth, it is impossible to track out-of-gas errors on zkSync Era. + +The main reason is that the “actual” execution happens inside the `DefaultAccount` +system contract and, due to the [63/64 rule](https://eips.ethereum.org/EIPS/eip-150), +when a high amount of gas is provided, the call to the `execute` function +of the `DefaultAccount` will NOT fail, even if it is out of gas, although the subcall to the `transaction.to` contract will fail with an out of gas error. diff --git a/content/00.build/65.developer-reference/80.l1-l2-interoperability.md b/content/00.build/65.developer-reference/80.l1-l2-interoperability.md new file mode 100644 index 00000000..1aa42f7a --- /dev/null +++ b/content/00.build/65.developer-reference/80.l1-l2-interoperability.md @@ -0,0 +1,46 @@ +--- +title: L1-L2 Interoperability +description: +--- + +## Common use cases + +Many use cases require multi-layer interoperability, such as: + +- The network's censorship resistance. +- Custom bridges. +- Multi-layer governing smart contracts. +- Multi-layer transfers. + +## L1 to L2 communication + +L1 to L2 communication is governed by the +[`IZkSync.sol`](https://github.com/matter-labs/era-contracts/blob/main/l1-contracts/contracts/zksync/interfaces/IZkSync.sol) inherited interfaces. 
+ +### Gas estimation + +The SDK processes gas estimation for transactions implicitly. However, it is also possible to implement the gas estimation processes explicitly. + +#### L1 to L2 gas estimation for transactions + +- Basic costs are measured in the amount of gas, and so the final cost depends on +the gas price that the transaction assigns. The transaction process requires the +current L1 gas price, transaction base cost, and transaction gas limit which defines the maximum amount of gas a transaction can consume. + +<!-- TODO: add tutorial reference links here --> + +## L2 to L1 + +L2 to L1 communication is based on transferring the data as a message, and not on L1 transaction execution. + +<!-- TODO: add tutorial reference links here --> + +## Priority queue + +1. All transaction types are supported by the priority queue. + +2. The priority queue must be fully permissionless to prevent malicious activity. +For example, malicious users might send multiple transactions which push up the +block gas limit to unworkable levels. To mitigate against this, submitting +transactions to the priority queue is no longer free and users must pay a fee to +the operator. diff --git a/content/00.build/65.developer-reference/_dir.yml b/content/00.build/65.developer-reference/_dir.yml new file mode 100644 index 00000000..f0fd110b --- /dev/null +++ b/content/00.build/65.developer-reference/_dir.yml @@ -0,0 +1 @@ +title: Developer Reference diff --git a/content/00.build/70.api-reference/00.index.md b/content/00.build/70.api-reference/00.index.md new file mode 100644 index 00000000..3fbca3e9 --- /dev/null +++ b/content/00.build/70.api-reference/00.index.md @@ -0,0 +1,63 @@ +--- +title: Overview +description: Explore the comprehensive guide to the zkSync Era JSON-RPC API, offering seamless Ethereum integration and advanced Layer 2 functionalities for developers. +--- + +Welcome to the zkSync Era API reference documentation! 
This page provides you with a high-level overview of our API capabilities and essential information. + +zkSync Era seamlessly integrates with the Ethereum ecosystem. To achieve this integration, +we support not only the standard <a href="https://ethereum.org/en/developers/docs/apis/json-rpc/" target="_blank">Ethereum JSON-RPC API</a> +but also introduce L2-specific features that enhance functionality. + +::callout{icon="i-heroicons-information-circle" color="amber"} +To ensure a seamless experience, we impose rate limits on both HTTPS and WebSocket APIs. +Generally, these limits are ample, ranging from 10 to 100 requests per second (RPS) per client. +:: + +### Mainnet + +:display-partial{ path="_partials/_mainnet-network-details" } + +### Testnet + +:display-partial{ path="_partials/_testnet-network-details" } + +## API Collections + +Explore our curated collections of API endpoints tailored for every need, from seamless Ethereum integrations to advanced debugging tools. +Embrace the full potential of zkSync Era and elevate your dApps to new heights. Discover, integrate, and innovate with our robust API offerings. + +::card-group + ::card + --- + title: Ethereum JSON-RPC API + icon: i-simple-icons-ethereum + to: /build/api-reference/ethereum-rpc + --- + Integrate effortlessly with full compatibility for the Ethereum JSON-RPC API. + :: + ::card + --- + title: zkSync JSON-RPC API + icon: i-zksync-zksync-logo + to: /build/api-reference/zks-rpc + --- + Unlock Layer 2 capabilities with our dedicated zkSync JSON-RPC API. + :: + ::card + --- + title: Debugging JSON-RPC API + icon: i-heroicons-code-bracket-16-solid + to: /build/api-reference/debug-rpc + --- + Simplify your development process with powerful debugging tools. + :: + ::card + --- + title: PubSub JSON-RPC + icon: i-heroicons-signal-solid + to: /build/api-reference/pub-sub-rpc + --- + Stay informed with real-time event subscriptions. 
+ :: +:: diff --git a/content/00.build/70.api-reference/10.conventions.md b/content/00.build/70.api-reference/10.conventions.md new file mode 100644 index 00000000..69e237cd --- /dev/null +++ b/content/00.build/70.api-reference/10.conventions.md @@ -0,0 +1,57 @@ +--- +title: Conventions +description: Formatting conventions and references for use with zkSync Era API docs. +--- + +## Hex value encoding + +Two key data types get passed over JSON: unformatted byte arrays and quantities. +Both are passed with a hex encoding but with different requirements for formatting. + +### Quantities + +When encoding quantities (integers, numbers): encode as hex, prefix with "0x", +the most compact representation (slight exception: zero should be represented as "0x0"). + +Here are some examples: + +- 0x41 (65 in decimal) +- 0x400 (1024 in decimal) +- WRONG: 0x (should always have at least one digit - zero is "0x0") +- WRONG: 0x0400 (no leading zeroes allowed) +- WRONG: ff (must be prefixed 0x) + +### Unformatted data +When encoding unformatted data (byte arrays, account addresses, hashes, bytecode arrays): +encode as hex, prefix with "0x", two hex digits per byte. + +Here are some examples: + +- 0x41 (size 1, "A") +- 0x004200 (size 3, "\0B\0") +- 0x (size 0, "") +- WRONG: 0xf0f0f (must be even number of digits) +- WRONG: 004200 (must be prefixed 0x) + +## Error Codes + +| **Category** | **Error Code** | **Message** | **Description** | +|--------------|----------------|--------------------|-----------------------------------------------------------| +| Standard | -32700 | Parse error | The JSON payload could not be parsed due to invalid syntax. | +| Standard | -32600 | Invalid request | The JSON object is not a valid request structure. | +| Standard | -32601 | Method not found | The requested method does not exist or is not available. | +| Standard | -32602 | Invalid params | The parameters provided to the method are invalid or malformed. 
| +| Standard | -32603 | Internal error | An unspecified internal error occurred within the JSON-RPC framework. | + +## The default block parameter + +When requests are made that act on the state of Ethereum, the last default block parameter determines the height of the block. + +The following options are possible for the defaultBlock parameter: + +- HEX String - an integer block number +- String "earliest" for the earliest/genesis block +- String "latest" - for the latest mined block +- String "safe" - for the latest safe head block +- String "finalized" - for the latest finalized block +- String "pending" - for the pending state/transactions diff --git a/content/00.build/70.api-reference/20.zks-rpc.md b/content/00.build/70.api-reference/20.zks-rpc.md new file mode 100644 index 00000000..a2b20199 --- /dev/null +++ b/content/00.build/70.api-reference/20.zks-rpc.md @@ -0,0 +1,1368 @@ +--- +title: ZKs JSON-RPC API +description: Overview of the JSON-RPC API methods specific to zkSync Era, detailing operations and functionalities within the zkSync Era ecosystem. +github: https://github.com/matter-labs/zksync-era/blob/main/core/lib/web3_decl/src/namespaces/zks.rs +--- + +zkSync Era provides a suite of JSON-RPC API methods designed for seamless interaction with its ecosystem. +These methods offer developers the tools needed to integrate their applications with zkSync Era's features, +enhancing the capability to perform transactions, query network data, and interact with smart contracts efficiently. + +## `zks_estimateFee` + +Estimates the fee for a given call request. + +#### Parameters + +1. :display-partial{path="build/api-reference/_partials/_call-request-params"} + +#### Returns + +The method returns an object containing the estimated gas and fee details for the given call request. + +- **gas_limit**: QUANTITY, 32 bytes - The maximum amount of gas that can be used. 
+- **max_fee_per_gas**: QUANTITY, 32 bytes - The maximum fee per unit of gas that the sender is willing to pay. +- **max_priority_fee_per_gas**: QUANTITY, 32 bytes - The maximum priority fee per unit of gas to incentivize miners. +- **gas_per_pubdata_limit**: QUANTITY, 32 bytes - The gas limit per unit of public data. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "zks_estimateFee", + "params": [ + { + "from": "0x1111111111111111111111111111111111111111", + "to": "0x2222222222222222222222222222222222222222", + "data": "0xffffffff" + } + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "gas_limit": "0x1ea9a9", + "max_fee_per_gas": "0x17d7840", + "max_priority_fee_per_gas": "0x0", + "gas_per_pubdata_limit": "0x5340" + }, + "id": 2 +} +``` + +--- + +## `zks_estimateGasL1ToL2` + +Estimates the gas required for an L1 to L2 transaction. + +#### Parameters + +1. :display-partial{path="build/api-reference/_partials/_call-request-params"} + +#### Returns + +**QUANTITY, 32 bytes** - The estimated gas amount in hexadecimal format, representing the number of gas units required. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "zks_estimateGasL1ToL2", + "params": [ + { + "from": "0x1111111111111111111111111111111111111111", + "to": "0x2222222222222222222222222222222222222222", + "data": "0xffffffff" + } + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x25f64db", + "id": 2 +} +``` + +--- + +<!-- // TODO: @dutterbutter +// Return to this method once its live in next upgrade. --> +## `zks_getBridgehubContract` + +Retrieves the bridge hub contract address. 
+ +#### Parameters + +None + +#### Returns + +**DATA, 20 bytes** - a single string value representing the bridge hub contract address. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getBridgehubContract", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": null, + "id": 1 +} +``` + +--- + +## `zks_getMainContract` + +Retrieves the main contract address. + +#### Parameters + +None + +#### Returns + +**DATA, 20 bytes** - address of the main contract. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getMainContract", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x32400084c286cf3e17e7b677ea9583e60a000324", + "id": 1 +} +``` + +--- + +## `zks_getTestnetPaymaster` + +Retrieves the testnet paymaster address, specifically for interactions within the %%zk_testnet_name%% environment. +**Note: This method is only applicable for %%zk_testnet_name%%.** + +#### Parameters + +None + +#### Returns + +**DATA, 20 bytes** - address of the testnet paymaster. + +#### Example Request + +```sh +curl --request POST \ + --url %%zk_testnet_rpc_url%% \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getTestnetPaymaster", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x3cb2b87d10ac01736a65688f3e0fb1b070b3eea3", + "id": 1 +} +``` + +--- + +## `zks_getBridgeContracts` + +Retrieves the addresses of canonical bridge contracts for zkSync Era. + +#### Parameters + +None + +#### Returns + +Object containing the addresses of bridge contracts. 
+ +- **l1Erc20DefaultBridge**: DATA, 20 bytes - address of the default ERC-20 bridge on Layer 1. +- **l2Erc20DefaultBridge**: DATA, 20 bytes - address of the default ERC-20 bridge on Layer 2. +- **l1WethBridge**: DATA, 20 bytes - address of the Wrapped Ethereum (WETH) bridge on Layer 1. +- **l2WethBridge**: DATA, 20 bytes - address of the Wrapped Ethereum (WETH) bridge on Layer 2. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getBridgeContracts", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "l1Erc20DefaultBridge": "0x57891966931eb4bb6fb81430e6ce0a03aabde063", + "l2Erc20DefaultBridge": "0x11f943b2c77b743ab90f4a0ae7d5a4e7fca3e102", + "l1WethBridge": "0x0000000000000000000000000000000000000000", + "l2WethBridge": "0x0000000000000000000000000000000000000000" + }, + "id": 1 +} +``` + +--- + +## `zks_L1ChainId` + +Retrieves the L1 chain ID. + +#### Parameters + +None + +#### Returns + +**QUANTITY, 8 bytes** - The hexadecimal representation of the L1 chain ID. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_L1ChainId", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x1", + "id": 1 +} +``` + +--- + +## `zks_getConfirmedTokens` + +Lists confirmed tokens. **Confirmed** in the method name means any token bridged to zkSync Era via the official bridge. + +The tokens are returned in alphabetical order by their symbol. This means the token id is its +position in an alphabetically sorted array of tokens. + +#### Parameters + +1. **uint32** - token id from which to start. +1. **uint8** - maximum number of tokens to list. 
+ +#### Returns + +**Array** of token objects, each containing details about a specific confirmed token. + +- **l1Address**: DATA, 20 bytes - Layer 1 Ethereum address of the token. +- **l2Address**: DATA, 20 bytes - Layer 2 zkSync Era address of the token. +- **name**: String - name of the token. +- **symbol**: String - symbol of the token. +- **decimals**: uint8 - number of decimals the token uses. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getConfirmedTokens", + "params": [1, 3] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "l1Address": "0xb6ed7644c69416d67b522e20bc294a9a9b405b31", + "l2Address": "0xfea352c0d005a1e4ac7e092ef14fca18b8e6c8fd", + "name": "0xBitcoin Token", + "symbol": "0xBTC", + "decimals": 8 + }, + { + "l1Address": "0x111111111117dc0aa78b770fa6a738034120c302", + "l2Address": "0x3f0b8b206a7fbdb3ecfc08c9407ca83f5ab1ce59", + "name": "1INCH Token", + "symbol": "1INCH", + "decimals": 18 + }, + { + "l1Address": "0xb50721bcf8d664c30412cfbc6cf7a15145234ad1", + "l2Address": "0xd5428b08b604727c43ba5a37eed25a289978d081", + "name": "Arbitrum", + "symbol": "ARB", + "decimals": 18 + } + ], + "id": 1 +} +``` + +--- + +## `zks_getAllAccountBalances` + +Gets all account balances for a given address. + +#### Parameters + +1. **DATA, 20 bytes** - account address. + +#### Returns + +The method returns an object with token addresses as keys and their corresponding +balances as values. Each key-value pair represents the balance of a specific token +held by the account. + +- **<DATA, 20 bytes>**: QUANTITY, 32 bytes - The token address is the key, and its value is the balance of that token held by the account, +represented in the smallest unit of the token (e.g., wei for ETH). 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getAllAccountBalances", + "params": ["0x98E9D288743839e96A8005a6B51C770Bbf7788C0"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "0x0000000000000000000000000000000000000000": "0x7c07ef5b520cb7d" + }, + "id": 1 +} +``` + +--- +<!-- // TODO: @dutterbutter +// Return to this method as it currently params are not working??. --> +## `zks_getL2ToL1MsgProof` + +Retrieves the proof for an L2 to L1 message. + +#### Parameters + +1. **uint32** - L2 block number. +1. **DATA, 20 bytes** - sender's address. +1. **DATA, 32 bytes** - message hash. +1. **number** - Optional. The log position in L2. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getL2ToL1MsgProof", + "params": [] + }' +``` + +#### Example Response + +```json +... +``` + +--- + +## `zks_getL2ToL1LogProof` + +Retrieves the log proof for an L2 to L1 transaction. + +#### Parameters + +1. **DATA, 32 bytes** - transaction hash. +1. **integer** - Optional. Index of the log. + +#### Returns + +- **proof**: Array of DATA, 32 bytes - array of strings, each representing a piece of the proof for the specified log. +- **id**: integer - identifier of the log within the transaction. +- **root**: DATA, 32 bytes - root hash of the proof, anchoring it to a specific state in the blockchain. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getL2ToL1LogProof", + "params": [ + "0x2a1c6c74b184965c0cb015aae9ea134fd96215d2e4f4979cfec12563295f610e" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "proof": [ + "0x8c48910df2ca7de509daf50b3182fcdf2dd6c422c6704054fd857d6c9516d6fc", + "0xc5028885760b8b596c4fa11497c783752cb3a3fb3b8e6b52d7e54b9f1c63521e", + "0xeb1f451eb8163723ee19940cf3a8f2a2afdf51100ce8ba25839bd94a057cda16", + "0x7aabfd367dea2b5306b8071c246b99566dae551a1dbd40da791e66c4f696b236", + "0xe4733f281f18ba3ea8775dd62d2fcd84011c8c938f16ea5790fd29a03bf8db89", + "0x1798a1fd9c8fbb818c98cff190daa7cc10b6e5ac9716b4a2649f7c2ebcef2272", + "0x66d7c5983afe44cf15ea8cf565b34c6c31ff0cb4dd744524f7842b942d08770d", + "0xb04e5ee349086985f74b73971ce9dfe76bbed95c84906c5dffd96504e1e5396c", + "0xac506ecb5465659b3a927143f6d724f91d8d9c4bdb2463aee111d9aa869874db" + ], + "id": 0, + "root": "0x920c63cb0066a08da45f0a9bf934517141bd72d8e5a51421a94b517bf49a0d39" + }, + "id": 1 +} +``` + +--- + +## `zks_L1BatchNumber` + +Retrieves the current L1 batch number. + +#### Parameters + +None + +#### Returns + +**QUANTITY, 8 bytes** - hexadecimal representation of the current L1 batch number. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_L1BatchNumber", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x72af2", + "id": 1 +} +``` + +--- + +## `zks_getBlockDetails` + +Retrieves details for a given block. + +- `committed`: The batch is closed and the state transition it creates exists on layer 1. + +- `proven`: The batch proof has been created, submitted, and accepted on layer 1. 
+ +- `executed`: The batch state transition has been executed on L1; meaning the root state has been updated. + +#### Parameters + +The method requires a single parameter to specify the block whose details are to be retrieved. + +1. **uint32** - number of the block. + +#### Returns + +Object containing detailed information about the specified block. + +- **number**: uint32 - number of the block. +- **l1BatchNumber**: uint32 - corresponding L1 batch number. +- **timestamp**: uint32 - Unix timestamp when the block was committed. +- **l1TxCount**: uint32 - number of L1 transactions included in the block. +- **l2TxCount**: uint32 - number of L2 transactions included in the block. +- **rootHash**: DATA, 32 bytes - root hash of the block's state after execution. +- **status**: String - current status of the block (e.g., verified, executed). +- **commitTxHash**: DATA, 32 bytes - transaction hash of the commit operation on L1. +- **committedAt**: String - timestamp when the block was committed on L1. +- **proveTxHash**: DATA, 32 bytes - transaction hash of the proof submission on L1. +- **provenAt**: String - timestamp when the proof was submitted on L1. +- **executeTxHash**: DATA, 32 bytes - transaction hash of the execution on L1. +- **executedAt**: String - timestamp when the block execution was completed on L1. +- **l1GasPrice**: uint64 - L1 gas price at the time of the block's execution. +- **l2FairGasPrice**: uint64 - fair gas price on L2 at the time of the block's execution. +- **baseSystemContractsHashes**: Object - A collection of hashes for the base system contracts. +- **operatorAddress**: DATA, 20 bytes - address of the operator who committed the block. +- **protocolVersion**: String - version of the zkSync protocol the block was committed under. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getBlockDetails", + "params": [140599] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "number": 140599, + "l1BatchNumber": 1617, + "timestamp": 1679815038, + "l1TxCount": 0, + "l2TxCount": 20, + "rootHash": "0xf1adac176fc939313eea4b72055db0622a10bbd9b7a83097286e84e471d2e7df", + "status": "verified", + "commitTxHash": "0xd045e3698f018cb233c3817eb53a41a4c5b28784ffe659da246aa33bda34350c", + "committedAt": "2023-03-26T07:21:21.046817Z", + "proveTxHash": "0x1591e9b16ff6eb029cc865614094b2e6dd872c8be40b15cc56164941ed723a1a", + "provenAt": "2023-03-26T19:48:35.200565Z", + "executeTxHash": "0xbb66aa75f437bb4255cf751badfc6b142e8d4d3a4e531c7b2e737a22870ff19e", + "executedAt": "2023-03-27T07:44:52.187764Z", + "l1GasPrice": 20690385511, + "l2FairGasPrice": 250000000, + "baseSystemContractsHashes": { + "bootloader": "0x010007793a328ef16cc7086708f7f3292ff9b5eed9e7e539c184228f461bf4ef", + "default_aa": "0x0100067d861e2f5717a12c3e869cfb657793b86bbb0caa05cc1421f16c5217bc" + }, + "operatorAddress": "0xfeee860e7aae671124e9a4e61139f3a5085dfeee", + "protocolVersion": "Version5" + }, + "id": 1 +} +``` + +--- + +## `zks_getTransactionDetails` + +Retrieves details for a given transaction. + +#### Parameters + +1. **DATA, 32 bytes** - hash of the transaction. + +#### Returns + +Object containing detailed information about the specified transaction. + +- **isL1Originated**: Boolean - Indicates whether the transaction originated on Layer 1. +- **status**: String - current status of the transaction (e.g., verified). +- **fee**: QUANTITY, 32 bytes - transaction fee. +- **gasPerPubdata**: QUANTITY, 32 bytes - gas amount per unit of public data for this transaction. +- **initiatorAddress**: DATA, 20 bytes - address of the transaction initiator. 
+- **receivedAt**: String - timestamp when the transaction was received. +- **ethCommitTxHash**: DATA, 32 bytes - transaction hash of the commit operation. +- **ethProveTxHash**: DATA, 32 bytes - transaction hash of the proof submission. +- **ethExecuteTxHash**: DATA, 32 bytes - transaction hash of the execution. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getTransactionDetails", + "params": [ + "0x22de7debaa98758afdaee89f447ff43bab5da3de6acca7528b281cc2f1be2ee9" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "isL1Originated": true, + "status": "verified", + "fee": "0x0", + "gasPerPubdata": "0x320", + "initiatorAddress": "0x87869cb87c4fa78ca278df358e890ff73b42a39e", + "receivedAt": "2023-03-03T23:52:24.169Z", + "ethCommitTxHash": "0x3da5b6eda357189c9243c41c5a33b1b2ed0169be172705d74681a25217702772", + "ethProveTxHash": "0x2f482d3ea163f5be0c2aca7819d0beb80415be1a310e845a2d726fbc4ac54c80", + "ethExecuteTxHash": "0xdaff5fd7ff91333b161de54534b4bb6a78e5325329959a0863bf0aae2b0fdcc6" + }, + "id": 1 +} +``` + +--- + +## `zks_getRawBlockTransactions` + +Lists transactions in a block without processing them. + +#### Parameters + +1. **uint32** - number of the block. + +#### Returns + +Array of objects, each representing a raw transaction within +the specified block. Each transaction object includes common data, execution details, +a timestamp, and the raw transaction bytes. + +- **common_data**: Object - general information about the L2 transaction, +such as nonce, fees, initiator address, signature, +transaction type, input data, and paymaster parameters. +- **execute**: Object - Details regarding the execution of the transaction, including the contract address, calldata, value, and any factory dependencies. 
+- **received_timestamp_ms**: Number - timestamp when the transaction was received, in milliseconds. +- **raw_bytes**: DATA, 32 bytes - raw bytes of the transaction as a hexadecimal string. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getRawBlockTransactions", + "params": [30098049] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "common_data": { + "L2": { + "nonce": 117, + "fee": { + "gas_limit": "0xbadbc", + "max_fee_per_gas": "0x202fbf0", + "max_priority_fee_per_gas": "0x0", + "gas_per_pubdata_limit": "0xc350" + }, + "initiatorAddress": "0xe7734c4a8201af41db64da90eddb4c19bbf64710", + "signature": [203, 118, 119, 63, 1, 54, 91, 252, 188, 23, 120, 51, 4, 28], + "transactionType": "EIP1559Transaction", + "input": { + "hash": "0x970c9480960818b05832f7e5b76a0c46956003c34942e005672d7ddc537aaa59", + "data": [2, 249, 1, 111, 1305, 187, 2, 170, 21, 166, 251, 255] + }, + "paymasterParams": { + "paymaster": "0x0000000000000000000000000000000000000000", + "paymasterInput": [] + } + } + }, + "execute": { + "contractAddress": "0x5155704bb41fde152ad3e1ae402e8e8b9ba335d3", + "calldata": "0fa0x5155704bb41fde152ad3e1ae402e8e8b9ba335d3", + "value": "0x0", + "factoryDeps": null + }, + "received_timestamp_ms": 1711649348872, + "raw_bytes": "0x02f9016f8201447580840202fbf" + } + // Additional transactions + ], + "id": 1 +} +``` + +--- + +## `zks_getL1BatchDetails` + +Retrieves details for a given L1 batch. + +#### Parameters + +1. **uint32** - L1 batch number. + +#### Returns + +Object of details for L1 batch. + +- **number**: uint32 - L1 batch number. +- **timestamp**: uint64 - Unix timestamp when the batch was processed. +- **l1TxCount**: uint32 - number of L1 transactions included in the batch. +- **l2TxCount**: uint32 - number of L2 transactions associated with this batch. 
+- **rootHash**: DATA, 32 bytes - root hash of the state after processing the batch. +- **status**: String - current status of the batch (e.g., verified). +- **commitTxHash**: DATA, 32 bytes - Ethereum transaction hash for the commit operation. +- **committedAt**: String - timestamp when the batch was committed on Ethereum. +- **proveTxHash**: DATA, 32 bytes - Ethereum transaction hash for the proof submission. +- **provenAt**: String - timestamp when the proof was submitted. +- **executeTxHash**: DATA, 32 bytes - Ethereum transaction hash for the execution. +- **executedAt**: String - timestamp when the execution was completed. +- **l1GasPrice**: uint64 - gas price on L1 at the time of batch processing. +- **l2FairGasPrice**: uint64 - fair gas price on L2 at the time of batch processing. +- **baseSystemContractsHashes**: Object - Hashes of the base system contracts involved in the batch. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getL1BatchDetails", + "params": [468355] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "number": 468355, + "timestamp": 1711649164, + "l1TxCount": 1, + "l2TxCount": 2363, + "rootHash": "0x7b31ef880f09238f13b71a0f6bfea340b9c76d01bba0712af6aa0a4f224be167", + "status": "verified", + "commitTxHash": "0x5b2598bf1260d498c1c6a05326f7416ef2a602b8a1ac0f75b583cd6e08ae83cb", + "committedAt": "2024-03-28T18:24:49.713730Z", + "proveTxHash": "0xc02563331d0a83d634bc4190750e920fc26b57096ec72dd100af2ab037b43912", + "provenAt": "2024-03-29T03:09:19.634524Z", + "executeTxHash": "0xbe1ba1fdd17c2421cf2dabe2908fafa26ff4fa2190a7724d16295dd9df72b144", + "executedAt": "2024-03-29T18:18:04.204270Z", + "l1GasPrice": 47875552051, + "l2FairGasPrice": 25000000, + "baseSystemContractsHashes": { + "bootloader": 
"0x010007ede999d096c84553fb514d3d6ca76fbf39789dda76bfeda9f3ae06236e", + "default_aa": "0x0100055b041eb28aff6e3a6e0f37c31fd053fc9ef142683b05e5f0aee6934066" + } + }, + "id": 1 +} +``` + +--- + +## `zks_getBytecodeByHash` + +Retrieves the bytecode of a transaction by its hash. + +#### Parameters + +1. **DATA, 32 bytes** - hash of the transaction. + +#### Returns + +**Array of uint8** - array of the transaction's bytecode. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getBytecodeByHash", + "params": ["0x0100067d861e2f5717a12c3e869cfb657793b86bbb0caa05cc1421f16c5217bc"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + 0, + 4, + 0, + ... + ], + "id": 1 +} +``` + +--- + +## `zks_getL1BatchBlockRange` + +Returns the range of blocks contained within a batch given by the batch number. + +The range is provided by the beginning and end block numbers in hexadecimal. + +#### Parameters + +1. **L1BatchNumber** - the layer 1 batch number. + +#### Returns + +**Array of Hex Strings** - array containing the beginning and end block numbers in hexadecimal. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getL1BatchBlockRange", + "params": [12345] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + "0x116fec", + "0x117015" + ], + "id": 1 +} +``` + +--- + +## `zks_getL1GasPrice` + +Retrieves the current L1 gas price. + +#### Parameters + +None + +#### Returns + +**QUANTITY, 8 bytes** - current L1 gas price in hexadecimal format, representing the amount of wei per unit of gas. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getL1GasPrice", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x11cccd9f58", + "id": 1 +} +``` + +--- + +## `zks_getFeeParams` + +Retrieves the current fee parameters. + +#### Parameters + +None + +#### Returns + +Object + +- **V2**: Object - fee parameter configuration for the current version of the zkSync protocol. +- **config**: Object - settings related to transaction fee computation. +- **minimal_l2_gas_price**: uint64 - minimal gas price on L2. +- **compute_overhead_part**: float64 - compute overhead part in fee calculation. +- **pubdata_overhead_part**: float64 - public data overhead part in fee calculation. +- **batch_overhead_l1_gas**: uint64 - overhead in L1 gas for a batch of transactions. +- **max_gas_per_batch**: uint64 - maximum gas allowed per batch. +- **max_pubdata_per_batch**: uint64 - maximum amount of public data allowed per batch. +- **l1_gas_price**: uint64 - current L1 gas price. +- **l1_pubdata_price**: uint64 - price of storing public data on L1. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getFeeParams", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "V2": { + "config": { + "minimal_l2_gas_price": 25000000, + "compute_overhead_part": 0, + "pubdata_overhead_part": 1, + "batch_overhead_l1_gas": 800000, + "max_gas_per_batch": 200000000, + "max_pubdata_per_batch": 240000 + }, + "l1_gas_price": 46226388803, + "l1_pubdata_price": 100780475095 + } + }, + "id": 1 +} +``` + +--- + +## `zks_getProtocolVersion` + +Gets the protocol version. + +#### Parameter + +1. **uint16** - Optional. 
Specific version ID. + +#### Returns + +Object + +- **version_id**: uint16 - protocol version ID. +- **timestamp**: uint64 - Unix timestamp of the version's activation. +- **verification_keys_hashes**: Object - Contains the hashes of various verification keys used in the protocol. +- **base_system_contracts**: Object - Addresses of the base system contracts, like the bootloader and default account abstraction (AA). +- **l2_system_upgrade_tx_hash**: DATA, 32 bytes - hash of the transaction used for the system upgrade, if any. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getProtocolVersion", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "version_id": 22, + "timestamp": 1710264138, + "verification_keys_hashes": { + "params": { + "recursion_node_level_vk_hash": "0x5a3ef282b21e12fe1f4438e5bb158fc5060b160559c5158c6389d62d9fe3d080", + "recursion_leaf_level_vk_hash": "0x400a4b532c6f072c00d1806ef299300d4c104f4ac55bd8698ade78894fcadc0a", + "recursion_circuits_set_vks_hash": "0x0000000000000000000000000000000000000000000000000000000000000000" + }, + "recursion_scheduler_level_vk_hash": "0x063c6fb5c70404c2867f413a8e35563ad3d040b1ad8c11786231bfdba7b472c7" + }, + "base_system_contracts": { + "bootloader": "0x010007ede999d096c84553fb514d3d6ca76fbf39789dda76bfeda9f3ae06236e", + "default_aa": "0x0100055b041eb28aff6e3a6e0f37c31fd053fc9ef142683b05e5f0aee6934066" + }, + "l2_system_upgrade_tx_hash": null + }, + "id": 1 +} +``` + +--- + +## `zks_getProof` + +This method generates Merkle proofs for one or more storage values associated with a specific account, +accompanied by a proof of their authenticity. It verifies that these values remain unaltered. 
+ +Similar to Ethereum's `eth_getProof`, this method provides verification means under zkSync Era's distinct +Merkle tree architecture, noting several key differences: + +- The retrieval of values and their respective proofs is determined by an L1 batch number instead of a block number. +- zkSync Era employs a different Merkle tree structure, necessitating a unique approach to proof verification. +Unlike Ethereum's two-level hexadecimal trie—where the top level maps to accounts and the bottom to +account storage slots—Era uses a single-level, full binary tree with 256-bit keys. +- In Ethereum, account-level values are mapped using specific combinations of account and storage keys. For example, to +store the code hash for account address A, it uses account `0x0000000000000000000000000000000000008002` +and a storage key generated by padding A's address. Conversely, zkSync Era's Merkle tree specifics are as follows: + +**zkSync Era Merkle Tree Details:** + +- The tree is a one-level, full binary tree, supporting 256-bit keys and 40-byte values. +- Keys are derived by reversing the output of `reversed(blake2s256([0_u8; 12] ++ account_address ++ storage_key))`, +where `account_address` is the 20-byte address, and `storage_key` is a 32-byte key from the account's storage. +The `++` operator denotes byte concatenation, and `reversed` inverts the byte sequence order. +- Values are structured as `big_endian(leaf_index) ++ storage_value`, with `leaf_index` marking +the 1-based index in entry order, and `storage_value` representing the 32-byte slot value. +- Empty tree entries are marked with 40 zero bytes (`[0_u8; 40]`). +- Leaf hashing utilizes `blake2s256` without tags, treating vacant leaves as `blake2s256([0_u8; 40])`. +- Internal node hashes are computed by concatenating the hashes of their child nodes, again using `blake2s256` +without tags: `blake2s256(left_child_hash ++ right_child_hash)` + +#### Parameters + +1. 
**DATA, 20 bytes** - account address to fetch storage values and proofs for. +1. **Array of Data, 32 bytes** - the keys in the account. +1. **uint32** - Number of the L1 batch specifying the point in time at which the requested values are returned. + +#### Returns + +The method returns an object containing the account details and proofs for storage keys. The structure of the returned object is as follows: + +- **address**: account address associated with the storage proofs. +- **storageProof**: array of objects, each representing a storage proof for the requested keys. + - **key**: DATA, 32 bytes - storage key for which the proof is provided. + - **value**: DATA, 32 bytes - value stored in the specified storage key at the time of the specified `l1BatchNumber`. + - **index**: uint64 - A 1-based index representing the position of the tree entry within the Merkle tree. This index is used + to help reconstruct the Merkle path during verification. + - **proof**: Array of DATA, 32 bytes - An array of 32-byte hashes that constitute the Merkle path from the leaf node + (representing the storage key-value pair) to the root of the Merkle tree. + The path is ordered from the root to the leaf. + The root hash itself is not included in this array because it is published on L1 as part of the L1 batch commit data. + If the `proof` array contains fewer than 256 hashes, it means that some hashes corresponding to entirely empty subtrees + are omitted starting from the end of the Merkle path. + For instance, if there are 255 hashes in the `proof`, + the single omitted hash is the hash of an empty subtree (`empty_value_hash = blake2s256([0_u8; 40])`). + Further omitted hashes are calculated progressively as hashes of the concatenated pair of the previous level's omitted hash + (e.g., `blake2s256(empty_value_hash ++ empty_value_hash)`), and so on, indicating progressively higher levels of empty subtrees in the Merkle tree. 
+ +Using the information in each `storageProof`, one can reconstruct the Merkle tree's root hash +for a given storage key and compare it to the reference root hash published on L1. +If the two hashes match, it confirms the authenticity of the provided storage value for the given key. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "zks_getProof", + "params": [ + "0x0000000000000000000000000000000000008003", + [ + "0x8b65c0cf1012ea9f393197eb24619fd814379b298b238285649e14f936a5eb12" + ], + 354895 + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "address": "0x0000000000000000000000000000000000008003", + "storageProof": [ + { + "key": "0x8b65c0cf1012ea9f393197eb24619fd814379b298b238285649e14f936a5eb12", + "proof": [ + "0xe3e8e49a998b3abf8926f62a5a832d829aadc1b7e059f1ea59ffbab8e11edfb7", + ... + ], + "value": "0x0000000000000000000000000000000000000000000000000000000000000060", + "index": 27900957 + } + ] + }, + "id": 1 +} +``` + +### `zks_sendRawTransactionWithDetailedOutput` + +Executes a transaction and returns its hash, storage logs, and events that would have been generated +if the transaction had already been included in the block. The API has a similar behaviour to +`eth_sendRawTransaction` but with some extra data returned from it. + +With this API Consumer apps can apply "optimistic" events in their applications instantly without having to wait for +zkSync block confirmation time. + +It’s expected that the optimistic logs of two uncommitted transactions that modify the same state will not have causal +relationships between each other. + +#### Inputs + +| Parameter | Type | Description | +| --------- | -------- | --------------------------------------------------------------------------- | +| `data` | `string` | The signed transaction. 
Typically, signed with a library such as ethers.js. |
+
+#### Example Request
+
+```sh
+curl -X POST -H "Content-Type: application/json" \
+--data '{"jsonrpc":"2.0","method":"zks_sendRawTransactionWithDetailedOutput","params":["Signed Transaction"],"id":1}' \
+"https://mainnet.era.zksync.io"
+```
+
+#### Example Response
+
+```json
+{
+  "transactionHash": "0xd586a9381ac33a70d1c34704664209242ee90316878fc1695aa8e4cf553c8595",
+  "storageLogs": [
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x7",
+      "writtenValue": "0x40000000000000000000000006641f961"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x6",
+      "writtenValue": "0x5f5e100"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x9",
+      "writtenValue": "0xc0000000000000000000000006641f961"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x16",
+      "writtenValue": "0xe1ef29fc6c51f74bbdef5bc1406e3c9925d89c5b1f79215648b82ac15419bcbe"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0xa",
+      "writtenValue": "0x0"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x10c",
+      "writtenValue": "0xc0000000000000000000000006641f961"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0xa",
+      "writtenValue": "0xe3ed371c32f62f3b3a28d51b909b2668e293c6cbfa4b4fd549c8f00a9a93a296"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x110",
+      "writtenValue": "0x0"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x10f",
+      "writtenValue": "0x88"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x1",
+      "writtenValue": "0x8001"
+    },
+    {
+      "address": "0x000000000000000000000000000000000000800b",
+      "key": "0x2",
+      "writtenValue": "0x5f5e100"
+    },
+    {
+      "address": "0x0000000000000000000000000000000000008003",
+      "key": 
"0xeaa2b2fbf0b42c559059e5e9510edc15755f1c1883f0e41d5ba5f9aea4ac201a", + "writtenValue": "0x4" + }, + { + "address": "0x000000000000000000000000000000000000800a", + "key": "0xeaa2b2fbf0b42c559059e5e9510edc15755f1c1883f0e41d5ba5f9aea4ac201a", + "writtenValue": "0x55ce6fa97340" + }, + { + "address": "0x000000000000000000000000000000000000800a", + "key": "0x31b66141c575a054316a84da9cf4aa6fe0abd373cab1bf4ac029ffc061aae0da", + "writtenValue": "0xb9b031bf400" + }, + { + "address": "0x000000000000000000000000000000000000800b", + "key": "0x1", + "writtenValue": "0x36615cf349d7f6344891b1e7ca7c72883f5dc049" + }, + { + "address": "0x000000000000000000000000000000000000800b", + "key": "0x1", + "writtenValue": "0x8001" + }, + { + "address": "0x000000000000000000000000000000000000800a", + "key": "0x31b66141c575a054316a84da9cf4aa6fe0abd373cab1bf4ac029ffc061aae0da", + "writtenValue": "0xa7557c54f00" + }, + { + "address": "0x000000000000000000000000000000000000800a", + "key": "0xeaa2b2fbf0b42c559059e5e9510edc15755f1c1883f0e41d5ba5f9aea4ac201a", + "writtenValue": "0x56f41b001840" + } + ], + "events": [ + { + "address": "0x000000000000000000000000000000000000800a", + "topics": [ + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", + "0x00000000000000000000000036615cf349d7f6344891b1e7ca7c72883f5dc049", + "0x0000000000000000000000000000000000000000000000000000000000008001" + ], + "data": "0x00000000000000000000000000000000000000000000000000000b9b031bf400", + "blockHash": null, + "blockNumber": null, + "l1BatchNumber": "0x4", + "transactionHash": "0xd586a9381ac33a70d1c34704664209242ee90316878fc1695aa8e4cf553c8595", + "transactionIndex": "0x0", + "logIndex": null, + "transactionLogIndex": null, + "logType": null, + "removed": false + }, + { + "address": "0x000000000000000000000000000000000000800a", + "topics": [ + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", + "0x0000000000000000000000000000000000000000000000000000000000008001", + 
"0x00000000000000000000000036615cf349d7f6344891b1e7ca7c72883f5dc049" + ], + "data": "0x00000000000000000000000000000000000000000000000000000125ab56a500", + "blockHash": null, + "blockNumber": null, + "l1BatchNumber": "0x4", + "transactionHash": "0xd586a9381ac33a70d1c34704664209242ee90316878fc1695aa8e4cf553c8595", + "transactionIndex": "0x0", + "logIndex": null, + "transactionLogIndex": null, + "logType": null, + "removed": false + } + ] +} +``` diff --git a/content/00.build/70.api-reference/30.debug-rpc.md b/content/00.build/70.api-reference/30.debug-rpc.md new file mode 100644 index 00000000..51d1d670 --- /dev/null +++ b/content/00.build/70.api-reference/30.debug-rpc.md @@ -0,0 +1,234 @@ +--- +title: Debug JSON-RPC API +description: Methods useful for debugging purposes with zkSync Era. +github: https://github.com/matter-labs/zksync-era/blob/main/core/lib/web3_decl/src/namespaces/debug.rs +--- + +## `debug_traceBlockByHash` + +Traces all calls made from a specific block by its L2 hash. + +#### Parameters + +1. **DATA, 32 bytes** - hash defining the L2 block. +1. **TracerConfig** - Optional configuration for tracing. Refer to the +[TraceConfig documentation](https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-debug#traceconfig) for more details. + +#### Returns + +Array of objects, each representing a traced call made from the specified block. 
+ +:display-partial{path="build/api-reference/_partials/_trace-object"} + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "debug_traceBlockByHash", + "params": ["0x4bd0bd4547d8f8a4fc86a024e54558e156c1acf43d82e24733c6dac2fe5c5fc7"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "type": "Call", + "from": "0x0000000000000000000000000000000000000000", + "to": "0x0000000000000000000000000000000000008001", + "gas": "0x18be25", + "gasUsed": "0x7603b", + "value": "0xa1e94fc0fe6043", + "output": "0x", + "input": "0x", + "error": null, + "revertReason": null, + "calls": [...] + }, + ... + ] +} +``` + +--- + +## `debug_traceBlockByNumber` + +Traces all calls made from a specific block by its L2 block number. + +#### Parameters + +1. **QUANTITY, 8 bytes | TAG** - The number of the block to trace. This can be a hex-encoded number or one of the strings "earliest", "latest", or "pending". +1. **TracerConfig** - Optional configuration for tracing. Refer to the +[TraceConfig documentation](https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-debug#traceconfig) for more details. + +#### Returns + +Array of objects, each representing a traced call made from the specified block. 
+
+:display-partial{path="build/api-reference/_partials/_trace-object"}
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "debug_traceBlockByNumber",
+    "params": ["0x24b258"]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": [
+    {
+      "type": "Call",
+      "from": "0x0000000000000000000000000000000000000000",
+      "to": "0x0000000000000000000000000000000000008001",
+      "gas": "0x18be25",
+      "gasUsed": "0x7603b",
+      "value": "0xa1e94fc0fe6043",
+      "output": "0x",
+      "input": "0x",
+      "error": null,
+      "revertReason": null,
+      "calls": [...]
+    },
+    ...
+  ]
+}
+```
+
+---
+
+## `debug_traceCall`
+
+Traces a call made at a specific block, by block number or hash.
+
+#### Parameters
+
+1. **CallRequest** - The call request to trace, containing fields like `from`, `to`, `data`, and optionally `gas`, `gasPrice`, and `value`.
+1. **DATA, 32 bytes | QUANTITY, 8 bytes** - Optional. The block identifier, which can be a block number as a hex-encoded number or a block hash.
+If not specified, the latest block is used.
+1. **TracerConfig** - Optional. Configuration options for the trace. For more details, refer to the [TraceConfig documentation](https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-debug#traceconfig).
+
+#### Returns
+
+Object representing the traced call, including any nested calls.
+ +:display-partial{path="build/api-reference/_partials/_trace-object"} + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "debug_traceCall", + "params": [ + { + "from": "0x1111111111111111111111111111111111111111", + "to": "0x2222222222222222222222222222222222222222", + "data": "0xffffffff" + }, + "0x24b258" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "type": "Call", + "from": "0x0000000000000000000000000000000000000000", + "to": "0x0000000000000000000000000000000000008001", + "gas": "0x0", + "gasUsed": "0x6b4b", + "value": "0x0", + "output": "0x", + "input": "0xffffffff", + "error": null, + "revertReason": null, + "calls": [] + }, + "id": 1 +} +``` + +--- + +## `debug_traceTransaction` + +Uses the [EVM's `callTracer`](https://geth.ethereum.org/docs/developers/evm-tracing/built-in-tracers#call-tracer) +to return a debug trace of a specific transaction given by its transaction hash. + +#### Parameters + +1. **DATA, 32 bytes** - The 32-byte hash of the transaction to trace. +1. **TracerConfig** - Optional. Configuration options for the trace. For more details, refer to the [TraceConfig documentation](https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-debug#traceconfig). + +#### Returns + +Array of objects, each representing a traced call made from the specified block. 
+ +:display-partial{path="build/api-reference/_partials/_trace-object"} + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "debug_traceTransaction", + "params": ["0x4b228f90e796de5a18227072745b0f28e0c4a4661a339f70d3bdde591d3b7f3a"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "type": "Call", + "from": "0x0000000000000000000000000000000000000000", + "to": "0x0000000000000000000000000000000000008001", + "gas": "0x154800", + "gasUsed": "0xc2419", + "value": "0x0", + "output": "0x", + "input": "0x095ea7b30000000000000000000000002da10a1e27bf85cedd8ffb1abbe97e53391c0295ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + "error": null, + "revertReason": null, + "calls": [...] + }, + ... + ] +} +``` diff --git a/content/00.build/70.api-reference/35.ethereum-rpc.md b/content/00.build/70.api-reference/35.ethereum-rpc.md new file mode 100644 index 00000000..94528d70 --- /dev/null +++ b/content/00.build/70.api-reference/35.ethereum-rpc.md @@ -0,0 +1,1516 @@ +--- +title: Ethereum JSON-RPC API +description: JSON-RPC API methods for the eth_ namespace for zkSync Era. +--- + +ZKsync Era supports the standard [Ethereum JSON-RPC API](https://ethereum.org/en/developers/docs/apis/json-rpc/). + +## Important Differences for Developers + +When working with zkSync, there are specific differences you should be aware of: + +1. **Block Data Retrieval Methods**: + - Methods that return data about a block, such as `eth_getBlockByHash`, `eth_getBlockByNumber`, and Geth’s pubsub + API `eth_subscribe` with the `newHeads` parameter, do not provide the actual `receiptsRoot`, `transactionsRoot`, and `stateRoot` values. + - Instead, these fields contain zero values because zkSync’s L2 blocks do not include the concept of a state root; only L1 batches have this concept. + +2. 
**Unsupported Method**: + - The method `eth_sendTransaction` is intentionally not supported in zkSync. + +## `eth_chainId` + +Gets the current chain ID. + +#### Parameters + +None + +#### Returns + +**QUANTITY** - hexadecimal representation of the current blockchain network's chain ID. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_chainId", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x144", + "id": 1 +} +``` + +--- + +## `eth_call` + +Executes a new message call immediately without creating a transaction on the block chain. + +#### Parameters + +1. :display-partial{path="/build/api-reference/_partials/_call-request-params"} + +1. **BlockIdVariant** - Optional. +See the [default block parameters](https://ethereum.org/en/developers/docs/apis/json-rpc/#default-block). + +#### Returns + +**DATA** - The data returned by the smart contract function, encoded in hexadecimal format. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_call", + "params": [ + { + "to": "0xc94770007dda54cF92009BFF0dE90c06F603a09f" + }, + "latest" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x", + "id": 1 +} +``` + +--- + +## `eth_estimateGas` + +Estimates the amount of gas needed to execute a call. The `from` field cannot be a smart contract that is not a `SmartAccount` if so an exception is hit. + +#### Parameters + +1. :display-partial{path="/build/api-reference/_partials/_call-request-params"} +1. **uint32** - Optional block number. + +#### Returns + +**QUANTITY** - The estimated amount of gas in hexadecimal format. 
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_estimateGas",
+    "params": [
+      {
+        "to": "0x...",
+        "data": "0x..."
+      }
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "id": 1,
+  "result": "0x5cec"
+}
+```
+
+---
+
+## `eth_gasPrice`
+
+Retrieves the current average gas price in the network, expressed in `wei`.
+This value provides an estimate of how much each unit of gas would cost for transactions
+on the network at the time of the query. It's particularly useful for dynamically adjusting
+transaction fees to current network conditions.
+
+#### Parameters
+
+None
+
+#### Returns
+
+**QUANTITY** - The current average gas price on the network, represented in `wei` and encoded as a hexadecimal string.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_gasPrice",
+    "params": []
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": "0x17d7840",
+  "id": 1
+}
+```
+
+---
+
+## `eth_newFilter`
+
+Initiates a new filter for listening to specific events emitted by smart contracts
+or other blockchain actions. This feature enables applications to react to events and
+updates in real-time by setting criteria for the events they're interested in.
+
+#### Parameters
+
+1. :display-partial{path="/build/api-reference/_partials/_filter-object"}
+
+#### Returns
+
+**QUANTITY** - unique identifier of the newly created filter, encoded as a hexadecimal string.
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_newFilter", + "params": [ + { + "topics": [ + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" + ] + } + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0xb415d4b0e6ca750802be8c84c99a68170deeb9ed09c4e2eb0dc5299ab715e978", + "id": 1 +} +``` + +--- + +## `eth_newBlockFilter` + +Creates a filter to notify when a new block arrives. + +#### Parameters + +None + +#### Returns + +**QUANTITY** - unique identifier of the newly created block filter, encoded as a hexadecimal string. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_newBlockFilter", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0xb825a38f6350ff4d75d806e6f83a42a31d39fc7ef4fde02b404e8edeef6799b", + "id": 1 +} +``` + +--- + +## `eth_uninstallFilter` + +Removes a filter that was previously created using `eth_newFilter`, `eth_newBlockFilter`, or +`eth_newPendingTransactionFilter`. This method is used to stop receiving updates for the +specified filter and to clean up resources associated with it on the node. + +#### Parameters + +1. **QUANTITY, 32 bytes** - unique identifier of the filter to be removed, originally returned by the filter creation method. + +#### Returns + +**Boolean** - `true` if the filter was successfully uninstalled; otherwise, `false`. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_uninstallFilter", + "params": ["0xb825a38f6350ff4d75d806e6f83a42a31d39fc7ef4fde02b404e8edeef6799b"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": true, + "id": 1 +} +``` + +## `eth_newPendingTransactionFilter` + +Sets up a new filter to provide notifications for transactions that enter the pending state, +which means they are broadcast to the network but not yet included in a block. This filter is +useful for tracking transactions that are awaiting confirmation. + +#### Parameters + +None + +#### Returns + +**QUANTITY, 32 bytes** - unique identifier of the new filter for pending transactions, encoded as a hexadecimal string. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_newPendingTransactionFilter", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x9d844950bf3e368b43490ff4efdc0a009887365103f9fece0d8cc44adabf6a82", + "id": 1 +} +``` + +--- + +## `eth_getLogs` + +Retrieves the logs matching a filter object. + +#### Parameters + +1. :display-partial{path="/build/api-reference/_partials/_filter-object"} + +#### Returns + +Array of log objects that match the filter criteria. Each log object contains detailed information about a single log entry. + +- **address**: Data, 20 bytes - address of the contract that generated the log. +- **topics**: Array of Data - Topics are indexed event parameters stored in the log. +- **data**: DATA - The data contained in the log. +- **blockHash**: DATA, 32 bytes - Hash of the block where this log was in. +- **blockNumber**: QUANTITY - block number where this log was in. 
+
+- **transactionHash**: DATA, 32 bytes - Hash of the transaction that generated this log.
+- **transactionIndex**: QUANTITY - Integer of the transaction's index position in the block.
+- **logIndex**: QUANTITY - Integer of the log's index position in the block.
+- **transactionLogIndex**: QUANTITY - Integer of the log's index position in the transaction.
+- **logType**: String - The type of log (can be `null`).
+- **removed**: Boolean - Indicates if the log was removed due to a chain reorganization.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_getLogs",
+    "params": [
+      {
+        "topics": [
+          "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
+        ],
+        "fromBlock": "latest"
+      }
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": [
+    {
+      "address": "0x000000000000000000000000000000000000800a",
+      "topics": [
+        "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
+        "0x00000000000000000000000091307ca99c2a9caac9d24259ae719a4a98453ba1",
+        "0x0000000000000000000000000000000000000000000000000000000000008001"
+      ],
+      "data": "0x00000000000000000000000000000000000000000000000000007df3cbd10c80",
+      "blockHash": "0xa0ddf35a0e35d03f533a2620d8c37fd4162d740cec3d3c635f43ca213f8a051f",
+      "blockNumber": "0x1d2ba84",
+      "transactionHash": "0xfdfcfdc6a0e2e3d09218749a752a2c2933f9eda5e9985c7fa3d861cb0112817d",
+      "transactionIndex": "0x0",
+      "logIndex": "0x0",
+      "transactionLogIndex": "0x0",
+      "logType": null,
+      "removed": false
+    },
+    ...
+  ],
+  "id": 1
+}
+```
+
+---
+
+## `eth_getFilterLogs`
+
+<!-- TODO: @dutterbutter check with platform about support here. Does not seem to be working as intended. -->
+
+Retrieves the logs for a filter created with `eth_newFilter`.
+
+#### Parameters
+
+1. **QUANTITY, 32 bytes** - the filter id.
+
+#### Returns
+
+Array of log objects matching the filter. See [`eth_getLogs`](#eth_getlogs) for the structure of a log object.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_getFilterLogs",
+    "params": [
+      "0xae2bfd759a98fd5e7a262f785b7706103a174391c5081dda92fea2cf6d9f94a6"
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": [
+    {...}
+  ],
+  "id": 1
+}
+```
+
+---
+
+## `eth_getFilterChanges`
+
+<!-- TODO: @dutterbutter check with platform about support here. Does not seem to be working as intended. -->
+
+Retrieves the logs since the last poll for a filter created with `eth_newFilter`.
+
+#### Parameters
+
+1. **QUANTITY, 32 bytes** - the filter id.
+
+#### Returns
+
+Array of log objects generated since the last poll of the filter. See [`eth_getLogs`](#eth_getlogs) for the structure of a log object.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_getFilterChanges",
+    "params": [
+      "0x127e9eca4f7751fb4e5cb5291ad8b455"
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": [
+    {...}
+  ],
+  "id": 1
+}
+```
+
+---
+
+## `eth_getBalance`
+
+Gets the balance of an account at a specific block.
+
+#### Parameters
+
+1. **DATA, 20 bytes** - address of the account whose balance is being queried.
+1. :display-partial{path="/build/api-reference/_partials/_quantity-tag"}
+
+#### Returns
+
+**QUANTITY** - The balance of the account at the specified block, encoded as a hexadecimal string representing the value in wei.
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_getBalance", + "params": [ + "0x103301a002a8AaDC8Fb83A2A70740FA6da7f83b8", + "latest" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "id":1, + "result": "0x12e54351196e564" +} +``` + +--- + +## `eth_getBlockByNumber` + +Retrieves a block by its number. + +#### Parameters + +1. :display-partial{path="/build/api-reference/_partials/_quantity-tag"} +1. **Boolean** - A boolean flag indicating whether to return full transaction objects (`true`) or just their hashes (`false`). + +#### Returns + +Object representing the requested block, including various metadata fields +and either a list of transaction hashes or full transaction objects, +based on the `full_transactions` parameter. + +- **hash**: DATA, 32 bytes - hash of the block. +- **parentHash**: DATA, 32 bytes - hash of the block's parent. +- **sha3Uncles**: DATA, 32 bytes - SHA3 of the uncles data in the block. +- **miner**: DATA, 20 bytes - address of the miner who mined the block. +- **stateRoot**: DATA, 32 bytes - root of the final state trie of the block. +- **transactionsRoot**: DATA, 32 bytes - root of the trie of the transactions in the block. +- **receiptsRoot**: Data, 32 bytes - root of the receipts trie of the block. +- **number**: QUANTITY - block number. +- **l1BatchNumber**: QUANTITY - (Optional) The L1 batch number associated with the block. +- **gasUsed**: QUANTITY - total gas used by all transactions in this block. +- **gasLimit**: QUANTITY - gas limit of the block. +- **baseFeePerGas**: QUANTITY - (Optional) base fee per gas in the block. +- **extraData**: DATA - Extra data attached to the block. +- **logsBloom**: DATA, 256 bytes - The bloom filter for the logs contained in the block. +- **timestamp**: QUANTITY - timestamp for when the block was collated. 
+- **l1BatchTimestamp**: QUANTITY - (Optional) L1 batch timestamp associated with the block. +- **difficulty**: QUANTITY - difficulty of the block. +- **totalDifficulty**: QUANTITY - total difficulty of the chain up to this block. +- **uncles**: Array - An array of uncle hashes. +- **transactions**: Array - An array of transaction hashes or transaction objects, depending on the `full_transactions` parameter. +- **size**: QUANTITY - size of the block in bytes. +- **mixHash**: DATA - mix hash of the block. +- **nonce**: DATA, 8 bytes - nonce of the block. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_getBlockByNumber", + "params": ["0x1d1551e", false] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "hash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "parentHash": "0x41f40b22c984aaf1c4cc98bfc8357156729f0a57ddc367fca8b38866b6b4a600", + "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "miner": "0x0000000000000000000000000000000000000000", + "stateRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "transactionsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "receiptsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "number": "0x1d1551e", + "l1BatchNumber": "0x72ae1", + "gasUsed": "0x1215d56", + "gasLimit": "0xffffffff", + "baseFeePerGas": "0x17d7840", + "extraData": "0x", + "logsBloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "timestamp": "0x660c1740", + "l1BatchTimestamp": "0x660c16dc", + "difficulty": "0x0", + "totalDifficulty": "0x0", + "sealFields": [], + "uncles": [], + "transactions": [ + "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "0xf200616da35820f5f407ffcc0a3fef84c8124638377eb2fdb4925888a90ff7da", + "0x54ba07e65314ea38ececd0f7f623306f00103727865bf036a0117181b596c95b", + "0x9453e01668129bc6e0bcece2a28a424fdde1624e5db03f83de51430ff3954b45", + "0xac42bce47be22c4bd7891b476be7ff7a9c67442c8359da0e5c598fa2f9fe71ef", + "0x1b1db6412cdf4ca8158b5efbeeada70d1d6e67a290b529e1fec145bab1e407da", + "0x5fe0e3ee65f5c6be6975bccfd116d54dd5d8b3c83074165a5641087fe36d2ee3", + "0x6ce2631af03a2d939a34d159ae01d025da4ea162ed5bf6769e18ace64cce29a9", + "0x386d0fdb243f20e8b2d2683686baa315b16cc81f7b80ea58f69b628e4e047a32", + "0x2c72143b520c4ad826972fc0725a8eeac590188b683cbb0cf103e5be60349607", + "0x0f7d10d27357b2f69b63cc808b76faf4c3d3bbd6dae59a10930399f6a7ab476b" + ], + "size": "0x0", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x0000000000000000" + }, + "id": 2 +} +``` + +--- + +## `eth_getBlockByHash` + +Retrieves a block by its hash. + +#### Parameters + +1. **DATA, 32 bytes** - hexadecimal string representing the hash of the block. +1. **Boolean** - A boolean flag indicating whether to return full transaction objects (`true`) or just their hashes (`false`). 
+ +#### Returns + +Object containing detailed information about the block and its transactions. + +- **hash**: DATA, 32 bytes - hash of the block. +- **parentHash**: DATA, 32 bytes - hash of the block's parent. +- **sha3Uncles**: DATA, 32 bytes - SHA3 of the uncles data in the block. +- **miner**: DATA, 20 bytes - address of the miner who mined the block. +- **stateRoot**: DATA, 32 bytes - root of the final state trie of the block. +- **transactionsRoot**: DATA, 32 bytes - root of the trie of the transactions in the block. +- **receiptsRoot**: Data, 32 bytes - root of the receipts trie of the block. +- **number**: QUANTITY - block number. +- **l1BatchNumber**: QUANTITY - (Optional) The L1 batch number associated with the block. +- **gasUsed**: QUANTITY - total gas used by all transactions in this block. +- **gasLimit**: QUANTITY - gas limit of the block. +- **baseFeePerGas**: QUANTITY - (Optional) base fee per gas in the block. +- **extraData**: DATA - Extra data attached to the block. +- **logsBloom**: DATA, 256 bytes - The bloom filter for the logs contained in the block. +- **timestamp**: QUANTITY - timestamp for when the block was collated. +- **l1BatchTimestamp**: QUANTITY - (Optional) L1 batch timestamp associated with the block. +- **difficulty**: QUANTITY - difficulty of the block. +- **totalDifficulty**: QUANTITY - total difficulty of the chain up to this block. +- **uncles**: Array - An array of uncle hashes. +- **transactions**: Array - An array of transaction hashes or transaction objects, depending on the `full_transactions` parameter. +- **size**: QUANTITY - size of the block in bytes. +- **mixHash**: DATA - mix hash of the block. +- **nonce**: DATA, 8 bytes - nonce of the block. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_getBlockByHash", + "params": [ + "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + false + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "hash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "parentHash": "0x41f40b22c984aaf1c4cc98bfc8357156729f0a57ddc367fca8b38866b6b4a600", + "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "miner": "0x0000000000000000000000000000000000000000", + "stateRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "transactionsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "receiptsRoot": "0x0000000000000000000000000000000000000000000000000000000000000000", + "number": "0x1d1551e", + "l1BatchNumber": "0x72ae1", + "gasUsed": "0x1215d56", + "gasLimit": "0xffffffff", + "baseFeePerGas": "0x17d7840", + "extraData": "0x", + "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "timestamp": "0x660c1740", + "l1BatchTimestamp": "0x660c16dc", + "difficulty": "0x0", + "totalDifficulty": "0x0", + "sealFields": [], + "uncles": [], + "transactions": [ + "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "0xf200616da35820f5f407ffcc0a3fef84c8124638377eb2fdb4925888a90ff7da", + 
"0x54ba07e65314ea38ececd0f7f623306f00103727865bf036a0117181b596c95b", + "0x9453e01668129bc6e0bcece2a28a424fdde1624e5db03f83de51430ff3954b45", + "0xac42bce47be22c4bd7891b476be7ff7a9c67442c8359da0e5c598fa2f9fe71ef", + "0x1b1db6412cdf4ca8158b5efbeeada70d1d6e67a290b529e1fec145bab1e407da", + "0x5fe0e3ee65f5c6be6975bccfd116d54dd5d8b3c83074165a5641087fe36d2ee3", + "0x6ce2631af03a2d939a34d159ae01d025da4ea162ed5bf6769e18ace64cce29a9", + "0x386d0fdb243f20e8b2d2683686baa315b16cc81f7b80ea58f69b628e4e047a32", + "0x2c72143b520c4ad826972fc0725a8eeac590188b683cbb0cf103e5be60349607", + "0x0f7d10d27357b2f69b63cc808b76faf4c3d3bbd6dae59a10930399f6a7ab476b" + ], + "size": "0x0", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "nonce": "0x0000000000000000" + }, + "id": 2 +} +``` + +--- + +## `eth_getBlockTransactionCountByNumber` + +This method provides the total number of transactions included in a specific block, +identified by its block number. It's a useful query for understanding the volume of +transactions processed in a particular block. + +#### Parameters + +1. :display-partial{path="/build/api-reference/_partials/_quantity-tag"} + +#### Returns + +**QUANTITY** - number of transactions in the specified block, encoded as a hexadecimal string. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 1, + "method": "eth_getBlockTransactionCountByNumber", + "params": ["0x1d1551e"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0xb", + "id": 2 +} +``` + +--- + +## `eth_getBlockReceipts` +Fetches transaction receipts for all transactions in a specified block, +offering comprehensive details such as the transaction status, gas used, and event logs. + +#### Parameters + +1. 
:display-partial{path="/build/api-reference/_partials/_quantity-tag"} + +#### Returns + +Array of transaction receipt objects, each containing detailed information about a transaction included in the specified block. + +- **transactionHash**: DATA, 32 bytes - hash of the transaction. +- **transactionIndex**: QUANTITY - index of the transaction in the block. +- **blockHash**: DATA, 32 bytes - hash of the block containing the transaction. +- **blockNumber**: QUANTITY - block number. +- **from**: DATA, 20 bytes - address of the sender. +- **to**: DATA, 20 bytes - address of the receiver. Can be `null` for contract creation transactions. +- **cumulativeGasUsed**: QUANTITY - total amount of gas used when this transaction was executed in the block. +- **gasUsed**: QUANTITY - amount of gas used by this specific transaction. +- **contractAddress**: DATA, 20 bytes - contract address created, if the transaction was a contract creation, otherwise `null`. +- **logs**: Array\<Log\> - An array of log objects generated by this transaction. +- **status**: QUANTITY - status of the transaction, where `"0x1"` indicates success and `"0x0"` indicates failure. +- **logsBloom**: DATA - bloom filter for the logs of the block. +- **type**: QUANTITY - type of the transaction. +- **effectiveGasPrice**: QUANTITY - effective gas price paid per unit of gas. 
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_getBlockReceipts",
+    "params": ["0x1d1551e"]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": [
+    {
+      "transactionHash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda",
+      "transactionIndex": "0x0",
+      "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9",
+      "blockNumber": "0x1d1551e",
+      "l1BatchTxIndex": "0x469",
+      "l1BatchNumber": "0x72ae1",
+      "from": "0x1bc3366b3664c01b8687b1efcfc6478d9351a8a9",
+      "to": "0x9b5def958d0f3b6955cbea4d5b7809b2fb26b059",
+      "cumulativeGasUsed": "0x0",
+      "gasUsed": "0x2b9bcb",
+      "contractAddress": null,
+      "logs": [
+        {
+          "address": "0x000000000000000000000000000000000000800a",
+          "topics": [
+            "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
+            "0x0000000000000000000000001bc3366b3664c01b8687b1efcfc6478d9351a8a9",
+            "0x0000000000000000000000000000000000000000000000000000000000008001"
+          ],
+          "data": "0x0000000000000000000000000000000000000000000000000001011c8f80b6c0",
+          "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9",
+          "blockNumber": "0x1d1551e",
+          "l1BatchNumber": "0x72ae1",
+          "transactionHash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda",
+          "transactionIndex": "0x0",
+          "logIndex": "0x0",
+          "transactionLogIndex": "0x0",
+          "logType": null,
+          "removed": false
+        },
+        {
+          ...
+        },
+        ...
+
+      ],
+      "l2ToL1Logs": [],
+      "status": "0x1",
+      "root": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9",
+      "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
+      "type": "0x2",
+      "effectiveGasPrice": "0x17d7840"
+    }
+  ],
+  "id": 2
+}
+```
+
+---
+
+## `eth_getBlockTransactionCountByHash`
+
+This method returns the number of transactions included in a block, identified by the block's hash.
+It's particularly useful for determining the transaction volume within a specific block without
+retrieving the transactions themselves.
+
+#### Parameters
+
+1. **DATA, 32 bytes** - hash of the block for which the transaction count is requested. This should be provided as a hexadecimal string.
+
+#### Returns
+
+**QUANTITY** - number of transactions in the block, encoded as a hexadecimal string.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 2,
+    "method": "eth_getBlockTransactionCountByHash",
+    "params": ["0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9"]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": "0xb",
+  "id": 2
+}
+```
+
+---
+
+## `eth_getCode`
+
+Retrieves the code at a specific address at an optional block.
+
+#### Parameters
+
+1. **DATA, 20 bytes** - The Ethereum address in hexadecimal format from which to retrieve the code.
+1.
:display-partial{path="/build/api-reference/_partials/_quantity-tag"} + +#### Returns + +**DATA** - The code at the given address in the specified block, returned as a hexadecimal string. If the address is not a contract a `0x` is returned. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_getCode", + "params": [ + "0x0cBE9d8a007ac5A090Ebdf044b688Fa8dfD862c3" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x00040000000000020000008003000039000000400030043f000000000301001900000060033002700000005d0330019700000001022001900000002a0000c13d000000040230008c0...", + "id": 2 +} +``` + +--- + +## `eth_getStorageAt` + +Retrieves the value from a storage position at a given address. + +#### Parameters + +1. **DATA, 20 bytes** - address +1. **QUANTITY** - index position of the storage slot in hexadecimal format, starting from `0x0`. +1. :display-partial{path="/build/api-reference/_partials/_quantity-tag"} + +#### Returns + +**DATA** - the value at this storage position. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_getStorageAt", + "params": ["0x0cBE9d8a007ac5A090Ebdf044b688Fa8dfD862c3", "0x0", "latest"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x0000000000000000000000000000000000000000000000000000000000000000", + "id": 2 +} +``` + +--- + +## `eth_getTransactionCount` + +Gets the number of transactions sent from an address. + +#### Parameters + +1. **DATA, 20 bytes** - address +1. :display-partial{path="/build/api-reference/_partials/_quantity-tag"} + +#### Returns + +**QUANTITY** - integer of the number of transactions sent from this address. 
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 2,
+    "method": "eth_getTransactionCount",
+    "params": [
+      "0x0f54f47bf9b8e317b214ccd6a7c3e38b893cd7f0",
+      "latest"
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": "0x0",
+  "id": 2
+}
+```
+
+---
+
+## `eth_getTransactionByHash`
+
+Retrieves a transaction by its hash.
+
+#### Parameters
+
+1. **DATA, 32 bytes** - hash of a transaction.
+
+#### Returns
+
+Object - A transaction object, or null when no transaction was found:
+
+- **blockHash**: DATA, 32 Bytes - hash of the block where this transaction was in. null when it's pending.
+- **blockNumber**: QUANTITY - block number where this transaction was in. null when it's pending.
+- **from**: DATA, 20 Bytes - address of the sender.
+- **gas**: QUANTITY - gas provided by the sender.
+- **gasPrice**: QUANTITY - gas price provided by the sender in Wei.
+- **hash**: DATA, 32 Bytes - hash of the transaction.
+- **input**: DATA - the data sent along with the transaction.
+- **nonce**: QUANTITY - the number of transactions made by the sender prior to this one.
+- **to**: DATA, 20 Bytes - address of the receiver. null when it's a contract creation transaction.
+- **transactionIndex**: QUANTITY - integer of the transaction's index position in the block. null when it's pending.
+- **value**: QUANTITY - value transferred in Wei.
+- **v**: QUANTITY - ECDSA recovery id +- **r**: QUANTITY - ECDSA signature r +- **s**: QUANTITY - ECDSA signature s + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_getTransactionByHash", + "params": [ + "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "hash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "nonce": "0x14c", + "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "blockNumber": "0x1d1551e", + "transactionIndex": "0x0", + "from": "0x1bc3366b3664c01b8687b1efcfc6478d9351a8a9", + "to": "0x9b5def958d0f3b6955cbea4d5b7809b2fb26b059", + "value": "0x398dd06d5c8000", + "gasPrice": "0x17d7840", + "gas": "0x7fcf94", + "input": "0xd7570e450000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000b49cfac038353ecd00000000000000000000000000000000000000000000000000000000660c4618000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000398dd06d5c800000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000ae1146c4b2aecd980451a67717c33050680e085b00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000060000000
0000000000000000005aea5775959fbc2557cc8789bc1bf90a239d9a910000000000000000000000001bc3366b3664c01b8687b1efcfc6478d9351a8a900000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000", + "v": "0x0", + "r": "0x5502a500c68407013cc2fbfeac220afdcb4ad1d997ba7c85d47d5622598d65d8", + "s": "0x5573ac964036b6ac7aab5bf0024134763cb86decbfcfeb2fca7becb9dab96355", + "type": "0x2", + "maxFeePerGas": "0x202fbf0", + "maxPriorityFeePerGas": "0x0", + "chainId": "0x144", + "l1BatchNumber": "0x72ae1", + "l1BatchTxIndex": "0x469" + }, + "id": 2 +} +``` + +--- + +## `eth_getTransactionByBlockHashAndIndex` + +Retrieves a transaction by block hash and transaction index position. + +#### Parameters + +1. **DATA, 32 bytes** - hash of a block. +1. **QUANTITY** - integer of the transaction index position, starting from `0x0`. + +#### Returns + +The response contains detailed information about the transaction, see [`eth_getTransactionByHash`](#eth_gettransactionbyhash). 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_getTransactionByBlockHashAndIndex", + "params": [ + "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "0x0" + ] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "hash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "nonce": "0x14c", + "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "blockNumber": "0x1d1551e", + "transactionIndex": "0x0", + "from": "0x1bc3366b3664c01b8687b1efcfc6478d9351a8a9", + "to": "0x9b5def958d0f3b6955cbea4d5b7809b2fb26b059", + "value": "0x398dd06d5c8000", + "gasPrice": "0x17d7840", + "gas": "0x7fcf94", + "input": "0xd7570e450000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000b49cfac038353ecd00000000000000000000000000000000000000000000000000000000660c4618000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000398dd06d5c800000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000ae1146c4b2aecd980451a67717c33050680e085b00000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000005aea5775959fbc2557cc8789bc1bf90a239d9a910000000000000000000000001bc3366b3664c
01b8687b1efcfc6478d9351a8a9000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000",
+    "v": "0x0",
+    "r": "0x5502a500c68407013cc2fbfeac220afdcb4ad1d997ba7c85d47d5622598d65d8",
+    "s": "0x5573ac964036b6ac7aab5bf0024134763cb86decbfcfeb2fca7becb9dab96355",
+    "type": "0x2",
+    "maxFeePerGas": "0x202fbf0",
+    "maxPriorityFeePerGas": "0x0",
+    "chainId": "0x144",
+    "l1BatchNumber": "0x72ae1",
+    "l1BatchTxIndex": "0x469"
+  },
+  "id": 2
+}
+```
+
+---
+
+## `eth_getTransactionReceipt`
+
+Retrieves the receipt of a transaction by transaction hash.
+
+#### Parameters
+
+1. **DATA, 32 bytes** - unique hash of the transaction.
+
+#### Returns
+
+Object - A transaction receipt object, or null when no receipt was found:
+
+- **transactionHash** : DATA, 32 Bytes - hash of the transaction.
+- **transactionIndex**: QUANTITY - integer of the transaction's index position in the block.
+- **blockHash**: DATA, 32 Bytes - hash of the block where this transaction was in.
+- **blockNumber**: QUANTITY - block number where this transaction was in.
+- **from**: DATA, 20 Bytes - address of the sender.
+- **to**: DATA, 20 Bytes - address of the receiver. null when it's a contract creation transaction.
+- **cumulativeGasUsed** : QUANTITY - The total amount of gas used when this transaction was executed in the block.
+- **effectiveGasPrice** : QUANTITY - The sum of the base fee and tip paid per unit of gas.
+- **gasUsed** : QUANTITY - The amount of gas used by this specific transaction alone.
+- **contractAddress** : DATA, 20 Bytes - The contract address created, if the transaction was a contract creation, otherwise null.
+- **logs**: Array - Array of log objects, which this transaction generated.
+- **logsBloom**: DATA, 256 Bytes - Bloom filter for light clients to quickly retrieve related logs.
+- **type**: QUANTITY - integer of the transaction type, 0x0 for legacy transactions, 0x1 for access list types, 0x2 for dynamic fees.
+ +It also returns either: + +- **root**: DATA 32 - bytes of post-transaction stateroot (pre Byzantium) +- **status**: QUANTITY - either `0x1` (success) or `0x0` (failure) + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_getTransactionReceipt", + "params": ["0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda"] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "transactionHash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "transactionIndex": "0x0", + "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "blockNumber": "0x1d1551e", + "l1BatchTxIndex": "0x469", + "l1BatchNumber": "0x72ae1", + "from": "0x1bc3366b3664c01b8687b1efcfc6478d9351a8a9", + "to": "0x9b5def958d0f3b6955cbea4d5b7809b2fb26b059", + "cumulativeGasUsed": "0x0", + "gasUsed": "0x2b9bcb", + "contractAddress": null, + "logs": [ + { + "address": "0x000000000000000000000000000000000000800a", + "topics": [ + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", + "0x0000000000000000000000001bc3366b3664c01b8687b1efcfc6478d9351a8a9", + "0x0000000000000000000000000000000000000000000000000000000000008001" + ], + "data": "0x0000000000000000000000000000000000000000000000000001011c8f80b6c0", + "blockHash": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "blockNumber": "0x1d1551e", + "l1BatchNumber": "0x72ae1", + "transactionHash": "0xb2adc4d2b3203e186001dc37fdf02cc8e772518425d263adc6a17dbddff3bfda", + "transactionIndex": "0x0", + "logIndex": "0x0", + "transactionLogIndex": "0x0", + "logType": null, + "removed": false + }, + ... 
+ ], + "l2ToL1Logs": [], + "status": "0x1", + "root": "0x5046bdc714b2a9b40e9fbfdfc5140371c1b03b40335d908de92a7686dcc067e9", + "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "type": "0x2", + "effectiveGasPrice": "0x17d7840" + }, + "id": 2 +} +``` + +--- + +## `eth_protocolVersion` + +Returns the current Ethereum protocol version. + +#### Parameters + +None + +#### Returns + +**String** - A single string indicating the protocol version. +The version is prefixed with an identifier +(e.g. "zks" for zkSync) followed by a version number. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_protocolVersion", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "zks/1", + "id": 2 +} +``` + +--- + +## `eth_sendRawTransaction` + +Submits a pre-signed transaction for broadcast to the network. + +#### Parameters + +1. **DATA** - The complete, signed transaction data. + +#### Returns + +**DATA, 32 bytes** - A single string that is the hash of the transaction if it has been successfully submitted to the network. +This hash can be used to track the transaction's inclusion in a block and +subsequent execution status. 
+ +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_sendRawTransaction", + "params": ["0xf86c808504a817c80082520894095e7baea6a6c7c4c2dfeb977efac326af552d870a868e8..."] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": "0x2f5d6a8af654c249bc487e7c7b926a3f3f165b575a6485a487f12c7a9e3c8e45", + "id": 2 +} +``` + +--- + +## `eth_accounts` + +Returns a list of addresses owned by the client. + +#### Parameters + +None + +#### Returns + +**Array of DATA, 20 bytes** - An array of account addresses owned by the client. + +#### Example Request + +```sh +curl --request POST \ + --url https://mainnet.era.zksync.io/ \ + --header 'Content-Type: application/json' \ + --data '{ + "jsonrpc": "2.0", + "id": 2, + "method": "eth_accounts", + "params": [] + }' +``` + +#### Example Response + +```json +{ + "jsonrpc": "2.0", + "result": [], + "id": 2 +} +``` + +--- + +## `eth_feeHistory` + +Retrieves the fee history for the requested blocks. + +#### Parameters + +1. **uint64** - the number of the blocks to check. +1. **QUANTITY** - the latest block number. +1. **Array of float32** - The percentiles of transaction fees to return. + +#### Returns + +Object containing the following fields: + +- **oldestBlock**: QUANTITY - block number in hex of the oldest block queried. +- **baseFeePerGas**: Array of QUANTITY - An array of base fees per gas, represented in hex, for each block. +- **gasUsedRatio**: Array of Float - An array of ratios of gas used by each block, represented as floats. +- **reward**: Array of Array\<QUANTITY\> - An array of arrays containing the transaction fee rewards at specified percentiles, each represented in hex. 
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "jsonrpc": "2.0",
+    "id": 1,
+    "method": "eth_feeHistory",
+    "params": [
+      "10",
+      "0x3039",
+      [25.0, 50.0, 75.0]
+    ]
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": {
+    "oldestBlock": "0x302a",
+    "baseFeePerGas": [
+      "0xee6b280",
+      "0xee6b280",
+      "0xee6b280"
+    ],
+    "gasUsedRatio": [
+      0.0,
+      0.0,
+      0.0
+    ],
+    "reward": [
+      ["0x0", "0x0", "0x0"],
+      ["0x0", "0x0", "0x0"],
+      ["0x0", "0x0", "0x0"]
+    ]
+  },
+  "id": 1
+}
+```
+
+## `web3_clientVersion`
+
+**Note:** The `sha3` method is intentionally omitted from the main server implementation,
+as it can be readily implemented on the client side if necessary.
+
+Retrieves the version of the client software.
+
+#### Parameters
+
+None
+
+#### Returns
+
+**String** - The client version supported by the node.
+The version is prefixed with an identifier (e.g. "zkSync" for zkSync) followed by a version number.
+
+#### Example Request
+
+```sh
+curl --request POST \
+  --url https://mainnet.era.zksync.io/ \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "id": 1,
+    "jsonrpc": "2.0",
+    "method": "web3_clientVersion",
+    "params": []
+  }'
+```
+
+#### Example Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": "zkSync/v2.0",
+  "id": 1
+}
+```
diff --git a/content/00.build/70.api-reference/40.pub-sub-rpc.md b/content/00.build/70.api-reference/40.pub-sub-rpc.md
new file mode 100644
index 00000000..a70861ea
--- /dev/null
+++ b/content/00.build/70.api-reference/40.pub-sub-rpc.md
@@ -0,0 +1,80 @@
+---
+title: PubSub JSON-RPC API
+description: Methods to subscribe/unsubscribe to events and receive notifications on zkSync Era.
+---
+
+Clients can subscribe to specific events and receive notifications,
+thus avoiding the need to poll.
+
zkSync is fully compatible with [Geth's pubsub API](https://geth.ethereum.org/docs/interacting-with-geth/rpc/pubsub), +except for the `syncing` subscription. + +The WebSocket URL is `wss://mainnet.era.zksync.io/ws` + +## `eth_subscribe` + +Creates a new subscription for events. + +#### Parameters + +1. **String** - Name of the subscription. Valid names include "newHeads" for new block headers, "logs" for new log entries, +and others depending on the client's capabilities. +1. **Object** - (Optional) Filter conditions for the subscription, applicable for subscriptions like "logs" where specific event criteria can be set. + +#### Returns + +**DATA** - A subscription ID used to identify and manage the subscription. + +#### Example Request + +```sh +{ + "id": 1, + "jsonrpc": "2.0", + "method": "eth_subscribe", + "params": ["newHeads"] +} +``` + +#### Example Response + +```json +{ + "id": 1, + "jsonrpc": "2.0", + "result": "0x9cef478923ff08bf67fde6c64013158d" +} +``` + +--- + +## `eth_unsubscribe` + +Cancels an existing subscription. + +#### Parameters + +1. **DATA** - The ID of the subscription to cancel, as returned by `eth_subscribe`. + +#### Returns + +**Boolean** - `true` if the subscription was successfully cancelled, `false` otherwise. 
+ +#### Example Request + +```sh +{ + "id": 1, + "jsonrpc": "2.0", + "method": "eth_unsubscribe", + "params": ["0x9cef478923ff08bf67fde6c64013158d"] +} +``` + +#### Example Response + +```json +{ + "id": 1, + "jsonrpc": "2.0", + "result": true +} +``` diff --git a/content/00.build/70.api-reference/_dir.yml b/content/00.build/70.api-reference/_dir.yml new file mode 100644 index 00000000..5a779f0d --- /dev/null +++ b/content/00.build/70.api-reference/_dir.yml @@ -0,0 +1 @@ +title: API Reference diff --git a/content/00.build/70.api-reference/_partials/_call-request-params.md b/content/00.build/70.api-reference/_partials/_call-request-params.md new file mode 100644 index 00000000..260526c9 --- /dev/null +++ b/content/00.build/70.api-reference/_partials/_call-request-params.md @@ -0,0 +1,20 @@ +--- +title: CallRequest Params +--- + +**CallRequest** - object +::collapsible + +- **from**: DATA, 20 bytes - Sender address. Arbitrary if not provided. +- **to**: DATA, 20 bytes - Recipient address. Required for `eth_call`. +- **gas**: QUANTITY - Gas limit for the transaction. Defaults if not provided. +- **gas_price**: QUANTITY - Gas price for the transaction. Defaults if not provided. +- **max_fee_per_gas**: QUANTITY - Maximum fee per unit of gas. +- **max_priority_fee_per_gas**: QUANTITY - Maximum priority fee per unit of gas. +- **value**: QUANTITY - Value transferred in the transaction. None for no transfer. +- **data / input**: DATA - Data sent with the transaction. Empty if not provided. +- **nonce**: DATA, 32 bytes - Transaction nonce. +- **transaction_type**: QUANTITY, 8 bytes - Type of the transaction. +- **access_list**: AccessList - EIP-2930 access list. +- **eip712_meta**: Eip712Meta - EIP-712 meta information. 
+:: diff --git a/content/00.build/70.api-reference/_partials/_filter-object.md b/content/00.build/70.api-reference/_partials/_filter-object.md new file mode 100644 index 00000000..84814865 --- /dev/null +++ b/content/00.build/70.api-reference/_partials/_filter-object.md @@ -0,0 +1,16 @@ +--- +title: Filter Object +--- + +**Filter** - Object containing various fields to specify the criteria for filtering events + +::collapsible + +- **fromBlock**: QUANTITY | TAG - The starting block (inclusive) to filter events from. Default is `"latest"`. +- **toBlock**: QUANTITY | TAG - The ending block (inclusive) to filter events up to. Default is `"latest"`. +- **address**: DATA | Array, 20 bytes - The contract address(s) to filter events from. +- **topics**: Array of Data - An array of topics to filter events by. +Each element can be a topic to match, or `null` to match any topic in that position. +- **blockHash**: DATA, 32 bytes - Filters events from a specific block hash, only allowing a single block to be specified. +Note that using `blockHash` will override any specified `fromBlock` and `toBlock` fields. 
+:: diff --git a/content/00.build/70.api-reference/_partials/_quantity-tag.md b/content/00.build/70.api-reference/_partials/_quantity-tag.md new file mode 100644 index 00000000..707d4a4c --- /dev/null +++ b/content/00.build/70.api-reference/_partials/_quantity-tag.md @@ -0,0 +1,5 @@ +--- +title: Quantity | Tag +--- + +**QUANTITY | TAG** - integer block number, or the string "latest", "earliest", "pending", "safe" or "finalized", see the [default block parameter](/build/api-reference/conventions#the-default-block-parameter) diff --git a/content/00.build/70.api-reference/_partials/_trace-object.md b/content/00.build/70.api-reference/_partials/_trace-object.md new file mode 100644 index 00000000..ea8f6c5c --- /dev/null +++ b/content/00.build/70.api-reference/_partials/_trace-object.md @@ -0,0 +1,22 @@ +--- +title: Trace Object +--- + +#### Trace Object + +::collapsible + +- **type**: String - The type of operation (e.g., `Call`, `Create`, etc.), indicating the nature of the trace. +- **from**: DATA, 20 bytes - address of the account that initiated the operation. +- **to**: DATA, 20 bytes - recipient address of the call. +For `Create` operations, this field is absent as the contract being created doesn't have an address until after the transaction completes. +- **gas**: QUANTITY, 32 bytes - amount of gas provided for the operation. +- **gasUsed**: QUANTITY, 32 bytes - amount of gas used by the operation. +- **value**: QUANTITY, 32 bytes - amount of Ether transferred during the operation. +- **output**: DATA - output from the operation. For operations that don't return data or failed, this is typically `0x`. +- **input**: DATA - data sent into the call or contract creation. +- **error**: String - error message if the operation failed. +- **revertReason**: String - reason provided by a `revert` operation, if applicable. +- **calls**: Array\<Object\> - array of nested calls made by this operation. 
This field is recursive, +containing further traces of calls made by the traced operation. +:: diff --git a/content/00.build/90.contributing-to-documentation/10.index.md b/content/00.build/90.contributing-to-documentation/10.index.md new file mode 100644 index 00000000..7b3f281c --- /dev/null +++ b/content/00.build/90.contributing-to-documentation/10.index.md @@ -0,0 +1,51 @@ +--- +title: Overview +description: Explore how to contribute to zkSync's open-source projects and community. +--- + +zkSync is an open-source project. We champion community-driven development, which means you, +from any corner of the world, can contribute to shaping zkSync's future. + +This section outlines how you can enhance our documentation, engage with the zkSync community, +and contribute to our other open-source projects. + +### Edit Existing Content + +We welcome your edits to any content on the zkSync Docs website. To contribute changes, +you will need a [GitHub account](https://github.com/signup). + +For minor edits, use the "Edit this page" link found on pages within the Table of Contents on the right side of the page. + +To suggest changes without directly editing, submit a GitHub issue via the "Share feedback" link, +also located in the Table of Contents sidebar. + +The best way to contribute is by [forking the zksync-docs repo](%%zk_git_repo_zksync-docs%%/fork), +making changes in a branch, and then submitting a PR. +To start, follow the README in the project repo and read the [Contribution Guidelines](/build/contributing-to-documentation/contribution-guidelines) +to familiarize yourself with the project structure and the documentation editing workflow. + +### Write New Content + +For the best experience in creating new content, [fork our zksync-docs project](%%zk_git_repo_zksync-docs%%/fork) +and set up a local project on your machine. 
If you are unfamiliar with the forking workflow, you can learn more about it +from [GitHub's articles on Forking](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/about-forks). + +As you write new content, please adhere to our +[Contribution Guidelines](/build/contributing-to-documentation/contribution-guidelines) +and consult the [Documentation Style Guide](/build/contributing-to-documentation/documentation-styleguide) +to maintain consistency across our documentation. + +### Submit a Community Tutorial or Guide + +The zksync-docs project primarily focuses on documentation that helps readers understand zkSync and develop in the ecosystem. +If your guide or tutorial includes using another tool or service with zkSync, consider submitting it to our Community content. +These documents, while adjacent to our technical documentation, are hosted in a separate project repo on GitHub. + +### Showcase Your Projects Built on zkSync + +We're excited to see new projects developed by our community within the zkSync ecosystem! +If you've released a project recently, we'd love to hear about it. +Our [zkSync Community Hub Discussions](https://github.com/zkSync-Community-Hub/zksync-developers) +has a section where you can +[submit your project](https://github.com/zkSync-Community-Hub/zksync-developers/discussions/new?category=show-and-tell) +for the community to discover. 
diff --git a/content/00.build/90.contributing-to-documentation/20.contribution-guidelines.md b/content/00.build/90.contributing-to-documentation/20.contribution-guidelines.md new file mode 100644 index 00000000..cd14a3eb --- /dev/null +++ b/content/00.build/90.contributing-to-documentation/20.contribution-guidelines.md @@ -0,0 +1,103 @@ +--- +title: Contribution Guidelines +description: Learn how to contribute to zkSync Docs +--- + +## Fork the project + +Make a [fork of the zksync-docs project](https://github.com/matter-labs/zksync-docs/fork) and create your branches from the default `main` branch. + +If your PR is still a work in progress, consider putting it into a [Draft status](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-stage-of-a-pull-request#converting-a-pull-request-to-a-draft). +Once your PR is ready for review, switch it back to an active PR request +and any reviewers already attached will automatically get a notification. + +## Commit conventions + +This project uses [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) standards. +For changes that are **code related**, use the `fix:`, `feat:`, or `chore:` tags in your commits. +For typo or document related changes, please use the `docs:` tag. + +```sh +git commit -m "docs: fix typo in guide" +``` + +### Signed git commits + +Your git commits need to be [signed with a verified signature](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification). + +1. Follow the instructions to + [generate a signing key](https://docs.github.com/en/authentication/managing-commit-signature-verification/generating-a-new-gpg-key). +1. [Add the key to your GitHub account](https://docs.github.com/en/authentication/managing-commit-signature-verification/adding-a-gpg-key-to-your-github-account). +1. 
[Tell git about your signing key](https://docs.github.com/en/authentication/managing-commit-signature-verification/telling-git-about-your-signing-key). + +## Contribution workflow + +To set up and run the project locally, consult the root [`README.md`](%%zk_git_repo_zksync-docs%%?tab=readme-ov-file#-zksync-developer-documentation) file. + +### Use VSCode + +To take full advantage of linting and formatting features, it is highly +recommended to use [VSCode](https://code.visualstudio.com/) and install the recommended extensions. +To manually install the recommended extensions, go to the Extensions tab +in VSCode and search for `@recommended`. + +--- +## What the project uses + +zkSync docs is built with Vue and Nuxt framework, utilizing various Nuxt Modules for content development. +Familiarize yourself with their documentation to provide you with the full capability of contributing. + +### Nuxt + +Nuxt is a Vue framework for building applications. +Visit the [Nuxt documentation](https://nuxt.com/docs/getting-started/introduction) +for an introduction to its structure and development process. + +#### Debugging with Nuxt + +Use Nuxt's [DevTools](https://devtools.nuxt.com/) for debugging. +A button with the Nuxt icon :u-icon{name="i-simple-icons-nuxtdotjs"} +appears at the bottom center of the viewport when running locally, opening a helpful panel. + +![Nuxt DevTools](/images/nuxt-debugger.png) + +### Nuxt Content + +Nuxt Content allows building with markdown files in a `/content` folder. +It supports [Common Markdown with additional features](https://content.nuxt.com/usage/markdown). + +A VSCode extension is recommended for proper syntax highlighting and is +included in the workspace list of recommended plugins to install. + +Navigation of markdown files is automatically generated according to a [naming scheme](https://content.nuxt.com/usage/content-directory) +that uses numbering to order pages. +We use a double digit numbering scheme. 
+ +#### Using Vue Components in markdown + +Vue components can be used within markdown files, although their syntax is different. +Components within markdown are identified with `::`. +Components placed in the `/components/content` are automatically imported for use in markdown files in `/content`. +Refer to the Nuxt Content page on [Vue components section](https://content.nuxt.com/usage/markdown#vue-components) +to learn how the Vue markdown syntax works. + +Vue components that are not within the `/components/content` directory +need to be globally configured to use in markdown content. + +### Nuxt UI + +[Nuxt UI](https://ui.nuxt.com/components/) along with +[Nuxt UI Pro](https://ui.nuxt.com/pro/components) are available to build UI. + +While Nuxt UI components are a part of the project, they need to be configured in +`nuxt.config.ts` to make them available in `/content` markdown files. +See [the current configuration](https://github.com/matter-labs/docs-nuxt-template/blob/main/nuxt.config.ts#L35) +to discover what components are already loaded. + +Local development messages regarding `NUXT_UI_PRO_LICENSE` can be ignored, it will not affect local development and testing. + +### Styling + +Styling relies on Nuxt UI and [Tailwind](https://tailwindcss.com/). +Adhere to Tailwind's [Utility-First Fundamentals](https://tailwindcss.com/docs/utility-first), +avoiding custom styles in components with the use of the `@apply` feature. diff --git a/content/00.build/90.contributing-to-documentation/30.documentation-styleguide.md b/content/00.build/90.contributing-to-documentation/30.documentation-styleguide.md new file mode 100644 index 00000000..453f2ba2 --- /dev/null +++ b/content/00.build/90.contributing-to-documentation/30.documentation-styleguide.md @@ -0,0 +1,256 @@ +--- +title: Documentation Styleguide +description: A comprehensive guide on zkSync documentation standards, including writing style, Markdown conventions, code snippets, and documentation categorization. 
+---
+
+This guide outlines the standards for creating zkSync documentation,
+ensuring consistency in writing style, Markdown conventions, and code snippets.
+
+## Writing Style
+
+For readability across a diverse audience, including non-native English speakers, we adhere to industry best practices from:
+
+- [Google Developer Documentation Styleguide](https://developers.google.com/style)
+- [Microsoft Writing Styleguide](https://learn.microsoft.com/en-us/style-guide/welcome/)
+
+It's crucial to create content that is inclusive, diverse, and timeless. Focus on:
+
+- [Inclusive documentation](https://developers.google.com/style/inclusive-documentation)
+- [Timeless documentation](https://developers.google.com/style/timeless-documentation)
+- [Bias-free communication](https://learn.microsoft.com/en-us/style-guide/bias-free-communication)
+- [Global communications](https://learn.microsoft.com/en-us/style-guide/global-communications/)
+
+## Spelling
+
+Content in zkSync Docs is run through a linter for markdown formatting and spellchecking.
+Some words may not pass the spellcheck linter and will need to be added to the dictionary list.
+New words can be added to lists in `/cspell-config`. All words added to the dictionary are checked
+for spelling only.
+
+## Time & Dates
+
+To minimize confusion due to global date format variations, adhere to the following in zkSync docs:
+
+- Start calendars on Mondays.
+- Use the date format `month dd, yyyy`, avoiding numerals for months (e.g., January 5, 2018).
+
+## Types of documentation
+
+Following the [Diataxis](https://diataxis.fr/) framework, zkSync Docs categorizes content into:
+
+- **Tutorials**: Step-by-step instructions to teach general skills (e.g., Deploying your first contract on zkSync Era).
+- **Guides**: Task completion instructions for readers with basic knowledge (e.g., Debugging with zksync-cli).
+- **References**: Detailed technical descriptions (e.g., Ethereum JSON-RPC API).
+- **Explanation**: Content to deepen subject understanding
+(e.g., Differences between zkSync Native Account Abstraction and Ethereum's EIP 4337).
+
+### Choosing a category
+
+Leverage the [Diataxis](https://diataxis.fr/) system when crafting a new article for zkSync Docs.
+Writing without a clear category often results in unfocused content.
+A well-defined focus keeps the content streamlined and clarifies the takeaway for the reader.
+
+While adhering to this system, it may become evident that a single article
+cannot encompass all aspects you wish to convey.
+Feel encouraged to create multiple articles across different categories to comprehensively address your topic.
+
+## Add new documentation
+
+To add new pages to our documentation, create new files under the `/content` directory.
+
+### Naming scheme
+
+File and folder names are prefixed with a number that Nuxt uses to handle what order pages are displayed in navigation.
+We use a two-digit numbering system to allow for easier re-ordering of files.
+
+If you are creating a new directory, **name your first file `00.index.md`**.
+This ensures there is a root path to the page.
+The display name will be defined by the title in the frontmatter.
+
+### _dir.yml
+
+The `_dir.yml` allows for further configuration of the directory.
+The `title` defined in this file is what defines the header for the dropdown panel in navigation.
+
+### Frontmatter
+
+All pages must have frontmatter with a `title` and `description`.
+If you do not want a description for your page, simply leave the `description` value blank.
+
+## Images
+
+Add images to the `public/images/` directory to use in zkSync docs
+and reference them from the `/images` path; do not include the `public/`.
+Use the markdown format to display images.
+
+```md
+![Image Alt Text](/images/example-image.png)
+```
+
+- Keep your image size to 600-960px wide.
+- If you are displaying visual diagrams with text, use SVG format for best clarity.
+- Avoid using images to display only text, code, or data output; use actual text.
+- Every image should have a descriptive alt text.
+- Optimize your image using a service like [TinyPNG](https://tinypng.com/).
+
+## Icons
+
+Utilize Nuxt UI icons with the [`UIcon` component](https://ui.nuxt.com/components/icon), following the naming pattern `i-{collection_name}-{icon_name}`.
+The icon collections pre-configured for this project are `heroicons`, `simple-icons`, `devicon`, and `logos`.
+
+You can browse the icons available in each collection on [Icones](https://icones.js.org/). This browser is also available from the Nuxt Debug tools.
+
+```md
+:u-icon{name="i-heroicons-light-bulb"}
+```
+
+## Links
+
+Internal links are generated in relation to the `/content` directory and the name of the markdown file.
+
+For example, the file `/content/1.quick-start/2.deploy-contract.md` is defined as the path `/quick-start/deploy-contract`.
+**Do not** add the number or file extension on the link.
+
+Example link to an internal page using markdown format, with an anchor tag.
+
+```md
+[Getting Started](/build/zksync-101#install-docker)
+```
+
+## Markdown and Vue components
+
+zkSync Docs combines Markdown with Vue components, though the syntax is different.
+For example, if you use a [Nuxt UI Button component](https://ui.nuxt.com/components/button), the HTML syntax would be the following:
+
+```html
+<UButton color="primary" variant="solid">
+  I am a button
+</UButton>
+```
+
+In markdown, the syntax changes to the following:
+
+```md
+::u-button{ color="primary" variant="solid" }
+  I am a button
+::
+```
+
+If the Vue component is an inline slot-less component, you can use a simplified inline format.
+
+```html
+<UButton label="Button" />
+<!-- format in markdown -->
+:u-button{ label="button" }
+```
+
+Learn more about using Markdown and Vue components with Nuxt Content's page on [Markdown](https://content.nuxt.com/usage/markdown).
+
+### Content Switcher
+
+For content organization into tabs, utilize the `ContentSwitcher` component.
+Apply the `items` prop to distribute content across tabs.
+Organize these partials in a folder that matches the parent markdown file’s name,
+preceded by an underscore.
+
+Use the Content Switcher component in a situation where you have more than one
+option to display for a subject depending on the reader's choices. For example,
+use the Content Switcher component for a tutorial that provides a Remix and
+Atlas option.
+
+```md
+::content-switcher
+---
+items: [{
+  label: 'Hardhat',
+  partial: '_getting-started/_aPartial'
+}, {
+  label: 'Forge',
+  partial: '_getting-started/_anotherpartial'
+}]
+---
+::
+```
+
+### Callouts
+
+Callouts present warnings, extra detail, or references to related topics.
+A callout should not contain anything essential to understanding the main content.
+
+#### Example
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+This is a `callout` with full **markdown** support. It can have markdown content like a link to [another page](/build/zksync-101).
+::
+
+#### Code
+
+```md
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+This is a `callout` with full **markdown** support. It can have markdown content like a link to [another page](/build/zksync-101).
+::
+```
+
+## Code Samples
+
+Use code samples where it helps to explain technical concepts. Create concise examples that are easy to read and understand;
+avoid pasting an entire file of code.
+Always define the language the code is represented in next to the backticks.
+Additionally, you can define the name of the file to display as a header on the block.
+ +#### Example + +```ts [nuxt.config.ts] +export default defineNuxtConfig({ + extends: ['@nuxt/ui-pro'], + modules: ['@nuxt/ui'] +}) +``` + +#### Code + +```ts + ```ts [nuxt.config.ts] + export default defineNuxtConfig({ + extends: ['@nuxt/ui-pro'], + modules: ['@nuxt/ui'] + }) + ``` +``` + +## Localization + +Currently, zkSync Docs does not offer localized documentation. +Updates to this section will be provided as localization features become available. + +## Use of AI + +While fully AI-generated content is not accepted for zkSync Docs, +the assistance of AI tools like ChatGPT in editing content is permitted. +These tools can enhance the editing process, +although they may occasionally produce inaccurate information. +Always carefully review any AI-assisted content before finalizing. + +You can use the following as a prompt for an AI tool to help with editing: + +```text +The content is written in common markdown format. +Use the Google Developer Documentation Style Guide and the Microsoft Style Guide, emphasizing Bias-free communication. +Write in an active voice. +Do not use the definite or indefinite article with “zkSync Era”. +Assume the reader may be an international person whose first language might not be English. +Don’t use overly complex words unless the technical description is lost if changed. +Ensure all communication is bias-free, following the Microsoft Style Guide's directives for inclusiveness and fairness in language. +If the article I submit has a Header1 at the beginning, move that into a yaml frontmatter for the 'title' property. +Include a short description of the article in the yaml frontmatter in the 'description' property. +``` + +If your content includes technical descriptions and code examples, you can add the following to your prompt +to assist with formatting. + +```text +Always use the word “function” and never the word “method” when referring to Solidity code. 
+Always use the word “function” for functions in JavaScript and TypeScript. +Always use the word “method” for attributes of an object in JavaScript and TypeScript. +When talking about Rust, always use the word “function” for functions defined on a type, and the word “method” for functions defined on an instance of a type. +When referring to a JSON RPC URL method, always use the word “method”. +``` diff --git a/content/00.build/90.contributing-to-documentation/_dir.yml b/content/00.build/90.contributing-to-documentation/_dir.yml new file mode 100644 index 00000000..95f834ec --- /dev/null +++ b/content/00.build/90.contributing-to-documentation/_dir.yml @@ -0,0 +1 @@ +title: Contributing diff --git a/content/00.build/95.resources/20.glossary.md b/content/00.build/95.resources/20.glossary.md new file mode 100644 index 00000000..5932f450 --- /dev/null +++ b/content/00.build/95.resources/20.glossary.md @@ -0,0 +1,47 @@ +--- +title: Glossary +description: A dictionary of terms you'll encounter with zkSync +--- + +### Account Abstraction + +Account Abstraction enhances transaction authorizations by making them programmable, +offering more flexibility than Externally Owned Accounts (EOAs). +This innovation aims to enrich user experiences and broaden the scope of transaction authorizations, +differing from EIP-4337. + +### EVM Equivalent + +EVM Equivalent means that a given protocol supports every opcode +of Ethereum’s EVM down to the bytecode. +Thus, any EVM smart contract works with 100% assurance out of the box. + +### EVM Compatible + +EVM Compatible means that a percentage of the opcodes of Ethereum’s EVM are supported; +thus, a percentage of smart contracts work out of the box. +zkSync is optimized to be EVM source-code compatible (with a custom compiler), not EVM equivalent. + +### Paymaster + +A Paymaster is an account on the zkSync network that can cover transaction fees for users. 
+It can pay these fees directly and allow users to reimburse the fees using various digital assets.
+
+### SNARK (Succinct Non-Interactive Argument of Knowledge)
+
+SNARKs are a kind of zero-knowledge proof system that are short and quick to verify.
+SNARKs are characterized by their use of the KZG (Kate, Zaverucha, and Goldberg) commitment scheme, which uses elliptic curve cryptography.
+
+### zkSync VM
+
+zkSync VM is the name of the architecture that enables zero-knowledge proof generation
+for the execution trace of smart contracts originally written for EVM.
+
+### ZK Rollup
+
+In zero-knowledge (ZK) rollups, the batch of transactions is verified for correctness on the Ethereum network.
+After the verification passes, the verified batch is considered final like any other Ethereum transaction.
+This is achieved through the power of cryptographic validity proofs (commonly called zero-knowledge proofs).
+With any batch of off-chain transactions, the ZK rollup operator generates a proof of validity for this batch.
+Once the proof is generated, it is submitted to Ethereum to make the rollup batch final.
+In zkSync, this is done via a [SNARK](#snark-succinct-non-interactive-argument-of-knowledge), succinct non-interactive argument of knowledge.
diff --git a/content/00.build/95.resources/30.audit-bug-bounty.md b/content/00.build/95.resources/30.audit-bug-bounty.md
new file mode 100644
index 00000000..408f3091
--- /dev/null
+++ b/content/00.build/95.resources/30.audit-bug-bounty.md
@@ -0,0 +1,80 @@
+---
+title: Audits and Bug Bounty Program
+description:
+---
+
+zkSync Era takes security seriously and, as such, we have completed multiple audits in all critical parts of the
+protocol. On top of that, there is an ongoing massive bug bounty program.
+
+## Audits
+
+We always ensure that all code deployed to production has been thoroughly tested before release. Our auditing and
+review processes begin well before any code is deployed.
We conduct internal audits, followed by independent +external audits from reputable auditors. If applicable, we also hold a public auditing contest and top it off with another independent external audit. + +Here is the list of **completed audits**: + +- Layer 1 Smart Contracts, Internal Audit, from 2022-06-14 to 2022-08-17. +- [Layer 1 Smart Contracts](https://blog.openzeppelin.com/zksync-layer-1-audit/), OpenZeppelin, from 2022-09-05 to 2022-09-30. +- [Layer 1 Diff Audit (Upgrade Audit)](https://blog.openzeppelin.com/zksync-layer-1-diff-audit/), OpenZeppelin, from 2022-11-21 to 2022-11-25. +- [Layer 1 Diff Audit (Upgrade Audit)](https://blog.openzeppelin.com/zksync-l1-diff-audit-february-2023/), OpenZeppelin, from 2023-02-06 to 2023-02-17. +- [Layer 1 Public Contest](https://code4rena.com/reports/2022-10-zksync/), Code4rena, from 2022-10-28 to 2022-11-09. +- [Layer 1 Smart Contracts](https://github.com/Secure3Audit/Secure3Academy/blob/main/audit_reports/zkSync/zkSync_L1_final_Secure3_Audit_Report.pdf), +Secure3, from 2022-10-22 to 2022-11-06. +- [WETH Bridge Audit](https://blog.openzeppelin.com/zksync-weth-bridge-audit), OpenZeppelin, from 2023-03-27 to 2023-03-31. +- [Bridge and .transfer & .send](https://blog.openzeppelin.com/zksync-bridge-and-.transfer-.send-diff-audit), OpenZeppelin, from 2023-04-24 to 2023-05-01. +- [GnosisSafeZk Assessment](https://blog.openzeppelin.com/zksync-gnosissafezk-assessment-1), OpenZeppelin, from 2023-05-22 to 2023-05-26. +- [Upgrade System](https://blog.openzeppelin.com/zksync-upgrade-system-audit), OpenZeppelin, from 2023-06-26 to 2023-06-30. +- [Layer 1 Messenger Upgrade](https://blog.openzeppelin.com/zksync-l1messenger-upgrade-audit), OpenZeppelin, from 2023-08-30 to 2023-09-14. +- [Diff and Governance Audit](https://blog.openzeppelin.com/december-diff-and-governance-audit), OpenZeppelin, from 2023-12-04 to 2023-12-22. +- Layer 2, Internal Audit, from 2022-08-17 to 2022-10-24. 
+
+- [Layer 2 Bootloader](https://blog.openzeppelin.com/zksync-bootloader-audit-report/), OpenZeppelin, from 2022-11-28 to 2022-12-23.
+- [Layer 2 Fee Model and Token Bridge](https://blog.openzeppelin.com/zksync-fee-model-and-token-bridge-audit/), OpenZeppelin, from 2023-01-23 to 2023-02-17.
+- [Layer 2 System Contracts Public Contest](https://code4rena.com/contests/2023-03-zksync-era-system-contracts-contest),
+Code4rena, from 2023-03-10 to 2023-03-19.
+- [Layer 2 Block Refactor](https://blog.openzeppelin.com/zksync-l2-block-refactor-audit), OpenZeppelin, from 2023-07-25 to 2023-07-31.
+- [Keccak256 Upgrade](https://blog.openzeppelin.com/zksync-keccak256-upgrade-audit), OpenZeppelin, from 2023-10-23 to 2023-10-27.
+- [Layer 1 & 2 Diff Audit](https://blog.openzeppelin.com/november-diff-audit), OpenZeppelin, from 2023-11-27 to 2023-12-05.
+- [Short-Term Fee Model Changes](https://blog.openzeppelin.com/short-term-fee-model-changes-audit), OpenZeppelin, from 2023-12-06 to 2023-12-13.
+- ZK Proof System, Internal Audit, from 2022-10-24 to 2022-11-18.
+- [ZK Proof System](https://github.com/HalbornSecurity/PublicReports/blob/master/ZK%20Audits/MatterLabs_zkSync_Era_Circuits_Zero_Knowledge_Security_Audit_Report_Halborn_Final..pdf),
+Halborn, from 2023-01-09 to 2023-03-08.
+- [Smart Contract Security Assessment](https://github.com/HalbornSecurity/PublicReports/blob/master/Solidity%20Smart%20Contract%20Audits/MatterLabs_Verifier_Smart_Contract_Security_Assessment_Report_Halborn_Final.pdf),
+Halborn, from 2023-07-12 to 2023-07-20.
+- [SNARK Wrapper](https://github.com/spearbit/portfolio/blob/master/pdfs/Matter-labs-snark-wrapper-Spearbit-Security-Review.pdf), Spearbit, November 2023.
+- [EIP-4844 Support](https://blog.openzeppelin.com/eip-4844-support-audit), OpenZeppelin, February 2024.
+
+## Bug Bounty Program
+
+zkSync Era has a very detailed **[Bug Bounty Program on Immunefi](https://immunefi.com/bounty/zksyncera/)**.
In the listing, you can
+find all the information related to assets in scope, reporting, and the payout process.
+
+### Scope
+
+The bug bounty program for zkSync Era aims to identify and resolve
+security vulnerabilities in our system before they can be exploited by
+malicious actors. The program is open to all individuals and teams who
+are interested in participating and are willing to comply with the
+program's rules and guidelines. The scope of the program covers all aspects of our blockchain products, including smart contracts,
+protocols, portals, and any other components that are part of our ecosystem.
+
+### Requirements
+
+1. Eligibility: The bug bounty program is open to anyone who is interested in participating and who complies with the program's rules and guidelines.
+2. Responsible Disclosure: All participants must agree to follow the responsible disclosure policy and report any security vulnerabilities
+they discover to our security team in a timely and responsible manner.
+3. Rewards: The bug bounty program offers rewards to participants who discover and report security vulnerabilities. The rewards are
+determined based on the severity of the vulnerability and are paid in USDC.
+4. Reporting Guidelines: Participants must follow the reporting guidelines specified by the program.
+5. No Public Disclosure: Participants must not publicly disclose any vulnerabilities they discover until after they have been resolved by
+our security team.
+6. No Exploitation: Attacks that the reporter has already exploited themselves, leading to damage, are not eligible for a reward.
+7. Legal Compliance: Participants must comply with all applicable laws and regulations, including data privacy and security laws.
+8. Program Changes: We reserve the right to modify or terminate the program at any time and without prior notice. We also reserve the
+right to disqualify any participant who violates the program's rules and guidelines.
+ +### Unscoped Bug + +If you think you have found a critical or major bug that is not covered by our existing bug bounty, please report it to us +[via the Immunefi program](https://immunefi.com/bounty/zksyncera/) regardless. We will seriously consider the impact of any issues and +may award a bounty even for out of scope assets or impacts. diff --git a/content/00.build/95.resources/40.community-channels.md b/content/00.build/95.resources/40.community-channels.md new file mode 100644 index 00000000..65fe9a08 --- /dev/null +++ b/content/00.build/95.resources/40.community-channels.md @@ -0,0 +1,57 @@ +--- +title: Community Channels +description: +--- + +Engage with our vibrant community and stay updated on the latest news, insights, and discussions through our diverse channels. Here's a quick overview: + +## **1. Developer Discussions** + +A dedicated space in GitHub for developers to discuss, collaborate, and brainstorm on technical topics. + +[Engage in GitHub Discussions](https://github.com/zkSync-Community-Hub/zksync-developers/discussions) + +--- + +## **2. Discord** + +Join real-time conversations, ask questions, and connect with both the team and fellow community members. + +[Join our Discord server](https://join.zksync.dev/) + +--- + +## **3. Twitter/X** + +Follow us for quick updates, announcements, and trends related to our platform, updates for developers, and the broader industry. + +[Follow us on Twitter](https://x.com/zksync) + +[Follow our developer-focused Twitter](https://x.com/zkSyncDevs) + +--- + +## **4. Blog** + +Delve deep into comprehensive articles, tutorials, and insights penned by our team. + +[Read our Blog](https://zksync.mirror.xyz/) + +--- + +## **5. GitHub Issues** + +The most effective way to seek technical support is by raising an issue on GitHub. Ensure you provide a comprehensive description, +relevant details, and steps to reproduce for prompt assistance. 
+ +- **zkSync Era Issues**: Address all queries and concerns related to zkSync Era here. [Report zkSync Era Issue](https://github.com/matter-labs/zksync-era/issues) +- **era-test-node Issues**: If you're facing challenges with the era-test-node, this is the place to raise them. [Report era-test-node Issue](https://github.com/matter-labs/era-test-node/issues) +- **SDKs Issues**: For any issues related to our Software Development Kits, kindly head over to this section. [Report SDKs Issue](https://github.com/zksync-sdk) + +--- + +## **6. Telegram** + +Get instant updates and participate in mobile-friendly discussions on our official Telegram channel. + +[Join us on Telegram](https://t.me/zksync) diff --git a/content/00.build/95.resources/50.contribution-track.md b/content/00.build/95.resources/50.contribution-track.md new file mode 100644 index 00000000..e185bde4 --- /dev/null +++ b/content/00.build/95.resources/50.contribution-track.md @@ -0,0 +1,102 @@ +--- +title: Contribution Track +description: +--- + +Welcome to the zkSync Contributors Track! The purpose of this track is to accelerate your journey into the thriving zkSync development +landscape. As a fully open-source project, we +believe in community-driven development, and that means anyone from anywhere can contribute to shaping the future of zkSync. + +This track aims to guide you in discovering various aspects of our project, inspire you to +contribute in the ways that interest you most, and provide a pathway to connect with the zkSync community. + +## Open-source repositories + +Here's a list of our key open-source repositories that you can contribute to: + +### zkSync Era + +- [**zkSync Era**](https://github.com/matter-labs/zksync-era): Node implementation for zkSync Era. + +### Smart Contracts + +- [**era-contracts**](https://github.com/matter-labs/era-contracts): Submodule containing the smart contracts for zkSync Era. 
+ +### Circuit Implementation + +- [**era-sync_vm**](https://github.com/matter-labs/era-sync_vm): Houses the circuit implementation of zkVM specifically for zkSync Era. + +### Testing and debugging + +- [**era-test-node**](https://github.com/matter-labs/era-test-node): An in-memory node designed for integration testing and debugging. + +### Development tools + +- [**zksync-cli**](https://github.com/matter-labs/zksync-cli): CLI tool that aims to simplify the development process on zkSync. +- [**hardhat-zksync**](https://github.com/matter-labs/hardhat-zksync): Hardhat plugins tailored for the zkSync Network. + +### Documentation + +- [**zksync-web-era-docs**](https://github.com/matter-labs/zksync-web-era-docs): The go-to source for official zkSync documentation. + +### SDK and explorers + +- [**block-explorer**](https://github.com/matter-labs/block-explorer): The official block explorer for navigating the zkSync network. +- [**zksync-sdk**](https://github.com/zksync-sdk): Software Development Kit for ease of integration with the zkSync network. + +Feel free to explore these repositories, and don't hesitate to contribute! + +## Why the contributors track? + +### Purpose + +- Facilitate easy entry for external contributors into zkSync's development ecosystem. +- Offer a structured approach to involvement, reducing the initial overwhelm of diving into a new project. +- Build a strong, diverse, and global community of developers who are excited about zkSync. + +### Who is it for? + +- Developers interested in blockchain and layer 2 solutions. +- Open-source enthusiasts looking to contribute to a growing project. +- Those interested in contract development, dApp development, zkSync Era, and more. + +## The track + +### Getting started + +#### 1. Introduce yourself on zkSync Discord + +- Join our [Discord channel](https://discord.com/invite/QKSsp7tC2x) and say 'hi' in the `#introductions` thread. +Share what interests you and what you're hoping to learn or contribute. 
+
+#### 2. Follow zkSync Developers on X (formerly Twitter)
+
+- Keep up to date with the latest news, updates, and releases by [following us on X](https://x.com/zkSyncDevs).
+
+#### 3. Dive into our official documentation
+
+- Immerse yourself in our comprehensive official documentation to acquire essential knowledge on zkSync. If
+you discover a typo, syntax error, or see room for improvement, submit a Pull Request to contribute to its enhancement.
+
+### Dive into development
+
+#### 4. Deploy your first contract using zksync-cli
+
+- Familiarize yourself with the [zksync-cli tool](https://github.com/matter-labs/zksync-cli) and deploy your first contract on the zkSync Era testnet.
+
+#### 5. Tackle a 'Good First Issue'
+
+- Start contributing immediately by taking on a "Good First Issue" from any of our GitHub repositories. This is
+a great way to get hands-on experience and a feel for the project.
+
+### Community engagement
+
+#### 6. Participate in zkSync Developer discussions
+
+- Join the discourse on [GitHub discussions](https://github.com/zkSync-Community-Hub/zksync-developers/discussions)
+ or other community forums to provide answers, ask questions, or share insights.
+
+#### 7. Submit a community tutorial or guide
+
+- Help others by sharing your knowledge. Write a tutorial or guide and submit it to our
+[GitHub project](https://github.com/zkSync-Community-Hub/community-code) for [Community Code](https://code.zksync.io).
diff --git a/content/00.build/95.resources/60.faq.md b/content/00.build/95.resources/60.faq.md
new file mode 100644
index 00000000..8a73d6ae
--- /dev/null
+++ b/content/00.build/95.resources/60.faq.md
@@ -0,0 +1,240 @@
+---
+title: FAQ
+description:
+---
+
+Here you will find some of the most common questions we receive about zkSync Era.
+
+## What is zkSync Era?
+
+zkSync Era is a Zero Knowledge (ZK) rollup that supports generalized EVM compatibility for the Ethereum blockchain.
The primary benefit of zkSync +Era is that developers who have created EVM dApps can port to zkSync Era effortlessly and realize +significantly lower gas fees and more transactions per second while inheriting Ethereum's security and decentralization. + +## Why zkSync Era? + +zkSync Era is a gigantic leap forward in Layer 2 technologies. It is a long-awaited improvement +that offers many never before enjoyed benefits for Ethereum developers. + +- **EVM Compatible** - zkSync is an EVM-compatible zero knowledge rollup that supports generalized EVM smart contracts. This means if you already +have EVM smart contracts, it’s super easy to port your dApp to zkSync Era. +- **Ethos Compatible** - we are very aligned with the ethos of decentralization and open source. All of our code will strive to be fully open-source +and zkSync will be executing a roadmap that will fully decentralize the sequencer and proof generation, and we will be executing a roadmap of +organizational subtractive management - that is, we will be decentralizing our organization as well. +- **Certainty** - Unlike previous methods attempting to scale Ethereum which have in some cases offered weaker security guarantees than for +L1 (e.g. sidechains, plasma, and optimistic) zkSync Era uses zero-knowledge proofs which offer _certainty_ of security. +- **Future Proof** - Ecosystem projects that adopt zkSync Era now will enjoy all future improvements without the need to change their code, in +particular coming from: + - The prover technology: hardware acceleration and [new proof systems](https://zksync.mirror.xyz/HJ2Pj45EJkRdt5Pau-ZXwkV2ctPx8qFL19STM5jdYhc). + - The compiler: integration of LLVM-enabled modern programming languages. [Learn more about our compiler toolchain](/zk-stack/components/compiler/toolchain). + - Other innovations like [ZK Chains, Hyperbridges and ZK Stack](/zk-stack/concepts/zk-chains). + +## What is the zkSync VM? 
+ +zkSync VM is the name of the architecture that enables zero-knowledge proof generation for the execution trace of smart contracts originally +written for EVM. + +Its architecture is based on the following components: + +- zkSync VM, a Turing-complete RISC-like virtual machine optimized for proving in a ZKP circuit. It has several different implementations: + - Executor: fast native execution on CPU. + - Witness generator: native executor to generate ZKP witness. + - Prover: the actual ZKP circuit implementation. +- LLVM-based compiler with: + - Solidity frontend (more precisely: Yul frontend). + - Vyper frontend. + - zkSync VM backend. +- Special-purpose circuits (heavily relying on PLONK’s custom gates and lookup tables) as “precompiles” for computationally intensive +operations, such as: + - Non-algebraic hashes (Keccak, SHA256, Blake2). + - Storage access (Merkle paths). + - Elliptic curve pairing. +- Recursive aggregation circuit (combines the proofs from the above-mentioned parts). + +### zkSync VM vs EVM + +Apart from the opcodes and gas metering disparity, zkSync VM strictly inherits the EVM programming model and its invariants, including the ABI +calling conventions. One important thing to emphasize is that the zkVM supports rollbacks and +provably revertible transactions. It guarantees +mutual protection: users can not stall the network with bombardment by revertible transactions, and +the escape hatch (priority queue) protects the user’s ability to include any transactions into the blocks. + +As a result, developers can fully rely on the censorship-resistance provided by L1 without having to introduce any changes related to the +escape-hatch mechanism. This means that assets in a zkRollup account on zkSync will have exactly the same security guarantees as on L1. + +### EVM Improvements + +While maintaining maximum compatibility, the zkSync Era's zkEVM has significant improvements over the EVM that increase adoption and benefit +our ecosystem projects. 
+ +- **Our compiler is based on LLVM**. LLVM-based compilers (Low-Level Virtual Machine) have become the default compiler for Mac OS X, iOS, FreeBSD, +and Android systems and are among the most widely used because they: + - Enable us to improve the efficiency over the original EVM bytecode because with LLVM we can take advantage of the many optimizations and + tools available in this mature ecosystem. + - Pave the way for us to add support for integrating codebases written in other programming languages with LLVM frontend. By + doing so, developers can build dApps and use blockchains in ways that are currently not possible. +- **Account Abstraction is implemented natively in our zkEVM**. This is a long-anticipated feature in the Ethereum dev community which improves +developer adoption and user experience in a number of ways: + - Native support for smart contracts wallets (like Clave), which is critical for onboarding mainstream users. + - Much better UX for multisigs. + - Transaction fees can be paid in any token using [paymasters](/build/developer-reference/account-abstraction/paymasters). + - Protocols can now subsidize gas for users from their smart contracts or even enable gasless transactions. + - Transaction batches (multicall) can be confirmed in one click (big UX problem on Ethereum today). + - Learn more about [account abstraction support in zkSync Era](/build/developer-reference/account-abstraction). + +### EVM Compatibility + +There is a lot of confusion amongst the community with regard to the impacts of being EVM Compatible +versus EVM Equivalent. First, let’s define what is meant by the two. + +- **EVM Equivalent** means that a given protocol supports every opcode of Ethereum’s EVM down to the bytecode. Thus, any EVM smart contract works +with 100% assurance out of the box. +- **EVM Compatible** means that a percentage of the opcodes of Ethereum’s EVM are supported; thus, a percentage of smart contracts work out of the box. 
+ +zkSync is optimized to be EVM _compatible_ not EVM _equivalent_ for three primary reasons: + +1. Creating a generalized circuit for EVM equivalence down to the bytecode would be prohibitively expensive and time-consuming. +2. Building on what we learned with zkSync Lite, we were able to design a system optimized for performance and provability in ZK. +3. The opcodes we’ve chosen NOT to support are deprecated by Ethereum itself, or rarely used. In the case a project needs them, modifications to +work with zkSync are minimal and do not generate a need for a new security audit. + +Almost every smart contract written for EVM will be supported by zkSync Era and will hold all key security invariants so that no additional security +re-auditing will be required in most cases. + +There are a few other distinctions, for example, gas metering will be different (as is the case for other L2s as well). Some EVM’s cryptographic +precompiles (notably pairings and RSA) won’t be available in the very first release but will be implemented soon after the launch, with pairing +being a priority to allow both ZK Chains and protocols like Aztec/Dark Forest to be deployed without modifications too. + +## Security expectations + +zkSync Era’s data availability layer is Ethereum. All ecosystem projects that build on zkSync Era will inherit the full security benefits of Ethereum. + +This is obviously a critically important topic for us, and the system has gone through several security audits and maintains a very detailed bug +bounty program. You can read more about [zkSync Era security in this section of the docs](/build/resources/audit-bug-bounty). + +### Triggering Security audits + +While there are a few, rarely used opcodes that we do not support, we have not found any instances with our ecosystem projects where a breaking +change was anything other than a simple refactoring of a few lines of code. 
None of our +ecosystem projects who have ported to zkSync have reported that any change has caused a need for a security audit. + +## What is Account Abstraction? + +At a very high level, Account Abstraction allows us to make authorizations _programmable_, enabling a greater diversity of wallet and protocol design +with use cases including: + +- The implementation of smart contract wallets that improve the user experience of private key storage and recovery +(eg. [social recovery](https://vitalik.eth.limo/general/2021/01/11/recovery.html), multisig). +- The ability to natively pay gas fees in tokens other than ETH. +- The ability for accounts to change public and private keys. +- The addition of non-cryptographic modifications, where users can require transactions to have expiry times, confirm slightly out-of-order, and more. +- Diversity in signature verification systems from the current ECDSA, including post-quantum safe signature algorithms (eg. Lamport, Winternitz). + +In other words, Account Abstraction brings about major improvements to the overall user experience, +and expands the application design space for developers. Learn more in [this blog post](https://www.argent.xyz/blog/wtf-is-account-abstraction/) by Argent. + +In zkSync Era Account Abstraction is natively implemented, meaning accounts can initiate transactions, like an EOA, but can also have +arbitrary logic implemented in them, like a smart contract. + +If you want to better understand what Account Abstraction on zkSync looks like, you can read [this section of the docs](/build/developer-reference/account-abstraction). + +## zkSync Era vs Optimistic Rollups + +Optimistic rollups utilize an optimistic approach to secure their networks. At the time of their development, they represented an important +incremental improvement over other available options. 
However, a widely held opinion +([including Vitalik Buterin's](https://coinculture.com/au/people/vitalik-buterin-zk-rollups-to-outperform-optimistic-rollups/)) is that optimistic +methods represent yet another temporary solution and in the long run the only permanent and truly scalable solution will be blockchains based on +Zero-Knowledge proofs. + +Optimistic rollups suffer from the following key downsides: + +- **Optimistic rollups are secured via game theory.** This method assumes all transactions are valid and then utilizes an after-the-fact game theory +mechanism to pay participants to discover fraudulent or otherwise invalid (e.g. because of bugs) transactions. Game theory is never perfect +and as with the game theory that broke with stablecoins and other systems, we just don’t think it can be relied on in the long term and at true +scale to offer the security the ecosystem needs. +zkSync Era, on the other hand, relies on math, not game theory, to provide the absolute certainty of +proof that every single transaction is valid and not fraudulent. + +- **Optimistic methods take 7 days to settle**. Settlement time is becoming an increasingly +important feature for ecosystem projects. As ecosystem projects’ needs mature, the need for as close to instant settlement will rise. With +optimistic methods, this settlement problem will not go away. It's always going to be a 7-day +settlement time because optimistic methods need 7 days for their after-the-fact game theory to conclude its challenge window. The only way around +this is to bring in third parties that provide some liquidity - but then again this is a potential security risk in trusting the liquidity providers. + +zkSync Era provides settlement in hours but with optimizations in the system we'll reduce the +settlement time without the need of projects to update their code. 
+ +- **Optimistic rollups have no method of scaling beyond where they are now.** When optimistic methods first came out, they became popular +because they scaled Ethereum (e.g. they enabled the processing of 10x Ethereum transactions +_without degradation of security and decentralization_). The problem is that while they +can scale Ethereum by 10x now, they have no mechanism to go beyond 10x without degrading security and decentralization. + +In contrast, zkSync Era is based on zero-knowledge proofs which have important characteristics that optimistic methods do not - they can hyperscale. + +## Which Wallets are supported? + +At the moment, we support any Ethereum-based wallet like Metamask, BitKeep, TrustWallet or Zerion. You can add zkSync network to your +Metamask manually by following the instructions in the [interact with zkSync Era](/build/connect-to-zksync) page. + +## Token Listing + +We source our token information from [Coingecko](https://www.coingecko.com/en/categories/zksync-ecosystem). + +- **Adding a Token**: all tokens are identified automatically. If you wish to include a logo, pricing, or other details for your token, ensure +it is listed on Coingecko. Once listed, these details will automatically appear on Block Explorer and Bridge. It may take up to 24 hours +for updates to be reflected. +- **Top Tokens List**: Tokens are arranged in descending order based on liquidity. We do not control the order of the tokens. + +## How do I Request Funds for Testnet? + +To access Sepolia testnet funds, [you can use one of our third party faucets](/ecosystem/network-faucets). + +## How long does it take to complete a deposit transaction? + +The transactions on zkSync Era should not take more than 5 minutes. + +## Where can I see the transactions I submitted? + +Our [Block Explorer](https://explorer.zksync.io) will show everything you may need about a transaction. + +## Can someone claim the address I have for my contract in other EVM networks in zkSync Era? 
+ +The contract address derivation formula is different from the regular EVM approach. Even if a contract is deployed from the same account address +with the same nonce, the zkSync Era contract address will not be the same as it is in another EVM network. This means, for example, that no one +will be able to claim an existing Ethereum address of your protocol to try to trick users into interacting with a malicious version of it. + +## What is Block Gas Limit on zkSync Era? + +The current value is currently set at roughly 2^32 gas. <br> +**Note**: This value is temporal and will be updated soon. + +## Can I withdraw my funds back to Ethereum? + +Yes, the bridge is two-way. You can withdraw your funds back to Ethereum. The withdrawal transaction +[will take ~24 hours, depending on the usage of the zkSync Era network](/build/resources/withdrawal-delay). + +## What is a testnet re-genesis? + +Sometimes, the team working on zkSync Era will initiate a re-genesis on testnet - a restart of the blockchain which will introduce upgrades and +return the state to the initial point. + +## Why do my contracts not compile in Windows? + +If you're using Windows, make sure to use WSL 2, since WSL 1 is known to cause trouble. + +Additionally, if you use WSL 2, make sure that your project is located in the Linux filesystem, since accessing NTFS partitions from WSL is very slow. + +## Proof sampling on testnet + +zkSync Era testnet is experiencing immense demand, but its permissionless nature leaves our developer +infrastructure vulnerable to potential DoS attack vectors. Generating ZK proofs inherently incurs costs, and a determined attacker could exploit +this by submitting a massive number of +transactions, resulting in significant expenses for us. As we currently lack a more effective method for rationing resources, we have opted to +prove a random subset of batches on the testnet – the subset fluctuates with demand but maintains a +high minimal threshold. 
This approach ensures the detection of errors probabilistically while conserving valuable GPU resources. + +This resource optimization not only supports a more efficient allocation of resources but also promotes a more environmentally-friendly process +compared to proving every batch on the testnet. + +Moving forward, we aim to develop better mechanisms to prevent Sybil attacks on our infrastructure, with the ultimate goal of enabling +the proof of all batches on the testnet. diff --git a/content/00.build/95.resources/70.withdrawal-delay.md b/content/00.build/95.resources/70.withdrawal-delay.md new file mode 100644 index 00000000..e5740714 --- /dev/null +++ b/content/00.build/95.resources/70.withdrawal-delay.md @@ -0,0 +1,73 @@ +--- +title: Withdrawal delay +description: +--- + +In order to prevent a quick drain of the protocol in the case a critical bug is discovered and exploited, we are introducing a block execution +delay. Each L2 block committed to L1 will have a time lock before it is executed and finalized. This means that there is enough time to verify the +effects of the transactions included in a block before the block becomes final. The zkSync team will be monitoring each block and investigating +any anomaly (e.g. rapid outflow, unusually large withdrawals, etc). + +To introduce this time lock, no changes were made to the audited smart contracts. Instead, we have used an existing Validator role that we control +and that we further restricted by pointing it to an intermediate smart contract with a time lock. The time lock is initially configured for a +**24-hour** delay, which will gradually decrease as the system matures. Changing the delay requires multiple signatures collected from several cold +wallets owned by zkSync leadership. 
+ +This design has the following advantages: + +- Even if an attacker finds a critical bug in ZK circuits and also successfully compromises the servers running our sequencer, there is plenty of +time to detect an exploit, investigate, and freeze the protocol via governance. +- No changes were introduced to the zkSync Era contracts, so even if the intermediate contract is compromised we revert back to the original state. +- Delayed execution affects not only the standard zkSync ETH and ERC20 bridges but also any custom bridge built by a different team. +- Implementing the logic in an external governor-controlled contract makes it easy to remove this limitation later. + +## Why can't I find my withdrawal on Etherscan? + +There are several reasons why your withdrawal may be successful but you cannot see your tokens, or the transaction, on Etherscan. For example: + +1. **Delay in block confirmation**: as mentioned +in the withdrawal delay section above, successful withdrawals may be subject to an ongoing confirmation process. Consequently, you won't be +able to see your transaction on Etherscan until the confirmation process completes. + +2. **Transaction reverted**: a withdrawal may fail to appear on Etherscan if the transaction is reverted due to a conflict or issue with the smart +contract that executed the transaction. In such cases, the transaction is canceled and it won't be included on Etherscan. + +3. **Wrong address**: if a user mistakenly sends tokens to an incorrect address, the transaction will not be visible on Etherscan, and the tokens +will not arrive in the intended recipient's wallet. + +### Transactions in Etherscan + +The **Transactions** section in Etherscan displays transactions between two Ethereum addresses. Each transaction has details such as the transaction +hash, the block number, the timestamp, the sender and receiver addresses, the amount of Ether or tokens involved, and the transaction fee. 
These +transactions are confirmed by the network and are typically sent from one external address to another. + +On the other hand, **Internal Transactions** are initiated by a smart contract or other internal code execution within the Ethereum network. +Although these transactions can be prompted by user activity, they are not sent directly from one address to another but are instead part of the +internal workings of a smart contract. Internal transactions may involve the transfer of ETH or ERC20 tokens between different addresses within the contract. + +Withdrawals from the zkSync Era network are typically internal transactions managed by the +[zkSync Era Diamond Proxy](https://etherscan.io/address/0x32400084c286cf3e17e7b677ea9583e60a000324) +contract. These transactions are recorded in the **Internal Transactions** section in Etherscan due to their internal nature. + +In summary, the **Transactions** section in Etherscan displays transactions between external addresses, while the **Internal Transactions** +section displays transactions that occur within smart contracts. + +### How to check your internal transactions + +1. Launch your web browser and visit [Etherscan.io](https://etherscan.io/). + +2. In the search bar at the top of the page, enter the Ethereum wallet address you used to withdraw funds. This should be the address you withdrew +funds from, not the destination address. + +3. Click **Enter**. + +4. Scroll down to the **Internal Transactions** section. + +5. Look for the internal transaction that corresponds to your withdrawal. You should see a transaction that shows the withdrawal amount +coming from the bridge. Withdrawal transactions from zkSync Era appear as transactions from **zkSync Era: Diamond Proxy** to your wallet address. + +6. Once you've located the transaction, click on the **Parent Tx Hash** to view more details about the transaction, including the block number, gas +used, and sender and recipient addresses. 
+ +If for any reason you're still unable to see your withdrawal transaction after following these steps, please contact us on Discord and we will +look into it. diff --git a/content/00.build/95.resources/_dir.yml b/content/00.build/95.resources/_dir.yml new file mode 100644 index 00000000..35ad676c --- /dev/null +++ b/content/00.build/95.resources/_dir.yml @@ -0,0 +1 @@ +title: Resources diff --git a/content/00.build/_dir.yml b/content/00.build/_dir.yml new file mode 100644 index 00000000..7478fd64 --- /dev/null +++ b/content/00.build/_dir.yml @@ -0,0 +1 @@ +title: Build diff --git a/content/10.zk-stack/00.index.md b/content/10.zk-stack/00.index.md new file mode 100644 index 00000000..6e284426 --- /dev/null +++ b/content/10.zk-stack/00.index.md @@ -0,0 +1,40 @@ +--- +title: Overview +description: This section provides an overview of the ZK Stack and zkEVM, detailing their roles in launching secure zero-knowledge rollups and forming the ZK Chains. +--- + + +## Introduction + +ZK Stack is designed to power the internet of value by providing the necessary security through blockchain technology. +It enables the launch of zero-knowledge rollups, offering an enhanced level of blockchain security. + +## Zero-Knowledge Rollups + +Zero-knowledge rollups, or ZK Rollups, leverage advanced cryptographic techniques known as zero-knowledge proofs. +These proofs ensure that each transaction within the rollup is executed correctly without revealing any transaction details. +ZK Rollups aggregate (or roll up) transaction data and submit it to a primary chain, such as Ethereum, for final validation. + +## zkEVM: Enhancing Ethereum Compatibility + +At the core of the ZK Stack is the zkEVM, which is designed to execute transactions while maintaining full compatibility with Ethereum. +This compatibility allows seamless integration and interaction with the broader Ethereum ecosystem. 
+ +## Verification by External Validators + +One of the key advantages of ZK Rollups is their verifiability by external validators. +Unlike traditional blockchains that require running a full node to verify transactions, +ZK Rollups allow their state to be validated externally through the proof provided. +This makes it simpler and more efficient to ensure the integrity of the rollup. + +## ZK Chain: A Network of Rollups + +ZK Rollups can also be validated by other rollups, facilitating the creation of a trustless network of rollups known as the ZK Chain. +This interconnected network enhances the scalability and interoperability of blockchain systems. + +## Technical Specifications + +The document will delve deeper into the zkEVM, providing a detailed specification of its components +including the prover, compiler, and the virtual machine itself. +Additionally, it will outline the foundational elements of the ZK Chain ecosystem, +highlighting how these innovations contribute to the scalability and security of blockchain technology. diff --git a/content/10.zk-stack/05.concepts/00.transaction-lifecycle.md b/content/10.zk-stack/05.concepts/00.transaction-lifecycle.md new file mode 100644 index 00000000..beb01bc8 --- /dev/null +++ b/content/10.zk-stack/05.concepts/00.transaction-lifecycle.md @@ -0,0 +1,161 @@ +--- +title: Transaction Lifecycle +description: An in-depth guide on the transaction lifecycle within the ZK Stack, explaining the roles of the sequencer and prover, and detailing the transaction statuses and types in zkSync Era. +--- + +The ZK Stack facilitates the launch of rollups, which require certain operators like the sequencer and the prover. +These operators are responsible for creating blocks and proofs, and submitting them to the L1 contract. + +Transactions are cryptographically signed instructions from accounts that aim to update the state of the Ethereum network. +The simplest transaction involves transferring ETH from one account to another. 
+[Learn more about Ethereum transactions](https://ethereum.org/en/developers/docs/transactions/). + +--- +## Workflow of Transactions + +Users submit their transactions to the sequencer, whose role is to collect and execute these transactions using the zkEVM. +The sequencer also provides a soft confirmation to users about the execution of their transactions. +Users have the option to force the inclusion of their transactions by submitting them through L1. +After execution, the sequencer forwards the block to the prover, who then creates a cryptographic proof of the block's execution. +This proof, along with the necessary data, is submitted to the L1 contract. +An L1 smart contract verifies the validity of the proof and the completeness of the data submission, thereby updating the rollup's state on the contract. + +![Components](/images/zk-stack/l2-components.png) + +The zkEVM plays a crucial role similar to, yet distinct from, the traditional EVM in Ethereum. +Transactions can also be initiated via L1, which facilitates L1<>L2 communication, providing censorship resistance and enabling trustless bridges to L1. + +The sequencer collects transactions into blocks and, to enhance user experience, ensures quick soft confirmations through small block sizes. +Unlike Ethereum, [zkEVM distinguishes between blocks and batches](blocks#batch-vs-block-vs-transaction), +where batches—collections of blocks—are processed by the prover. + +Before submitting a proof, the data is sent to L1. +The method optimizes data submission by detailing only the changes in blockchain state, termed 'state diff.' +This approach makes transactions affecting the same storage slots more cost-effective. + +At the final stage, proofs are generated and sent to L1 using our Boojum proof system, +which operates efficiently on 16GB of GPU RAM, allowing for decentralization of proof generation. 
+ +--- +## Transaction data + +Transactions in zkSync Era are [comparable to those on Ethereum](https://ethereum.org/en/developers/docs/transactions/), +allowing the use of the same wallets. +Minor differences exist, particularly regarding fee settings. +For details on fees, refer to the [fee model documentation](/zk-stack/concepts/fee-mechanism). + +Returned values from any RPC call outputting transaction details include: + +- `is_l1_originated`: `bool` +- `status`: `TransactionStatus`, one of `Pending`, `Included`, `Verified`, or `Failed`. See [Transaction statuses section](#transaction-statuses) below. +- `fee`: `U256`. See the [fee mechanism documentation](/zk-stack/concepts/fee-mechanism) for more information. +- `initiator_address`: `Address` +- `received_at`: `DateTime<Utc>` +- `eth_commit_tx_hash`: `Option<H256>` +- `eth_prove_tx_hash`: `Option<H256>` +- `eth_execute_tx_hash`: `Option<H256>` + +--- +## Contract Deployment Transactions + +Contract deployment transactions interact with the `ContractDeployer` system contract and differ from standard transactions. + +[Learn more about contract deployment in zkSync](/build/developer-reference/ethereum-differences/contract-deployment). + +--- +## Transaction statuses + +Transactions are always in one of the following statuses: + +- `Pending`: Awaiting inclusion in a block. +- `Included`: Added to a block, but the block's batch is not yet committed. +- `Verified`: Included and verified after batch commitment and execution on the Ethereum L1 network. +- `Failed`: Transaction did not verify successfully. + +For more on transaction completion and irrevocability, see the [finality documentation](finality). + +--- +## Transaction types + +zkSync Era supports a range of transaction types that are compatible with Ethereum, +yet they incorporate unique settings particularly around fee configurations. 
+Here’s a detailed look at the transaction types, +including legacy, EIP-2930, EIP-1559, and EIP-712, and how they are implemented in zkSync Era. + +::callout{icon="i-heroicons-information-circle" color="blue"} +When using RPC methods like [`eth_getTransactionByHash`](https://ethereum.github.io/execution-apis/api-documentation/), +the transaction type hex value is included in the output. +:: + +### Legacy: `0x0` + +This is the original Ethereum transaction format used before the introduction of typed transactions. + +### EIP-2930: `0x1` + +Implemented to mitigate risks introduced by EIP-2929, [EIP-2930: Optional access lists](https://eips.ethereum.org/EIPS/eip-2930) +adds an `accessList` to transactions, which is an array of addresses and storage keys. + +### EIP-1559: `0x2` + +Introduced in Ethereum's London update, [EIP-1559: Fee market change for ETH 1.0 chain](https://eips.ethereum.org/EIPS/eip-1559) +modifies how transaction fees are handled, replacing `gasPrice` with a base fee and allowing users to set `maxPriorityFeePerGas` and `maxFeePerGas`. + +- `maxPriorityFeePerGas`: Maximum fee users are willing to pay miners as an incentive. +- `maxFeePerGas`: Overall maximum fee, including the `maxPriorityFeePerGas` and the base fee determined by the network. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +In zkSync Era, while the EIP-1559 transaction format is supported, the `maxFeePerGas` and `maxPriorityFeePerGas` parameters are not utilized. +:: + +### EIP-712: `0x71` + +[EIP-712: Typed structured data hashing and signing](https://eips.ethereum.org/EIPS/eip-712) +enables structured data hashing and signing within transactions. +zkSync Era uses this for features like [account abstraction](/build/developer-reference/account-abstraction) and [paymasters](/build/developer-reference/account-abstraction/paymasters). 
+ +```json +"gasPerPubdata": "1212", +"customSignature": "0x...", +"paymasterParams": { + "paymaster": "0x...", + "paymasterInput": "0x..." +}, +"factoryDeps": ["0x..."] +``` + +- `gasPerPubdata`: Specifies the maximum gas payable per byte of public data. +- `customSignature`: For transactions where the account is not an externally owned account (EOA). +- `paymasterParams`: Configuration for custom paymasters, including address and inputs. +- `factoryDeps`: Includes bytecode of contracts for deployment, essential for factory contracts. + +EIP-712 transactions are designated with a `transaction_type` of `113` due to the one-byte limit, differing from `712`. + +Instead of signing the RLP-encoded transaction, the user signs the following typed EIP-712 structure: + +- txType: `uint256` +- from: `uint256` +- to: `uint256` +-gasLimit: `uint256` +- gasPerPubdataByteLimit: `uint256` +- maxFeePerGas: `uint256` +- maxPriorityFeePerGas: `uint256` +- paymaster: `uint256` +- nonce: `uint256` +- value: `uint256` +- data: `bytes` +- factoryDeps: `bytes32[]` +- paymasterInput: `bytes` + +These fields are handled by our SDKs. + +### Priority: `0xff` + +<!-- TODO: update link --> +<!-- This transaction type is specific to zkSync Era and is used for [L1 to L2 transactions](../../build/tutorials/how-to/send-transaction-l1-l2.md), --> +<!-- highlighting the unique multi-layer interaction that does not exist on Ethereum L1. --> + +--- + +Each of these transaction types ensures that while zkSync Era remains closely aligned with Ethereum standards, +it also optimizes for its Layer 2 specific needs and functionalities. 
diff --git a/content/10.zk-stack/05.concepts/10.blocks.md b/content/10.zk-stack/05.concepts/10.blocks.md new file mode 100644 index 00000000..d7dfae35 --- /dev/null +++ b/content/10.zk-stack/05.concepts/10.blocks.md @@ -0,0 +1,208 @@ +--- +title: Blocks and Batches +description: Explore how zkSync Era processes transactions by grouping them into blocks and batches, the role of sealing blocks, and the importance of rollbacks in the virtual machine. +--- + +## Overview of blocks and batches + +zkSync Era processes transactions not only as individual units but also groups them into blocks and batches for efficiency and cost-effectiveness. +This section covers how transactions are grouped, the concept of sealing blocks, and why rollbacks are crucial in our virtual machine (VM). + +### Understanding L2 and L1 blocks + +**L2 blocks**, also referred to as miniblocks, are specific to the zkSync Era network and are not recorded on the Ethereum blockchain. +These blocks contain a smaller number of transactions, allowing for quick processing. + +Contrastingly, **L1 rollup blocks**, or batches, consist of several consecutive L2 blocks. +These batches compile all transactions from multiple L2 blocks in the same sequence they were processed. +The primary purpose of creating batches is to minimize the costs associated with Ethereum interactions by distributing them across numerous transactions. 
+ +--- +## Differences between batch, block, and transaction + +To clarify these concepts visually, consider the following illustrations: + +![Block layout](/images/zk-stack/block-layout.png) +*The Block layout image displays the organization of transactions within blocks and how L2 blocks are arranged within L1 batches.* + +![Explorer example](/images/zk-stack/explorer-example.png) +*This Explorer example shows how the zkSync Era explorer represents blocks and transactions.* + +--- +## Detailed look at L2 blocks + +While L2 blocks are crucial, their importance will increase with the transition to a decentralized sequencer. +Currently, they serve mainly as a compatibility feature for tools like Metamask, which expect frequent block updates to confirm transaction statuses. + +An L2 block is generated every 1 second, encompassing all transactions received within that timeframe. +This rapid creation ensures consistent transaction processing. + +(You can check the difference between `RemainingBlock` and `EstimateTimeInSec` from +the [block countdown api endpoint](https://block-explorer-api.mainnet.zksync.io/docs#/Block%20API/ApiController_getBlockCountdown)), +and it includes all the transactions received during that time period. +This periodic creation of L2 blocks ensures that transactions are processed and included in the blocks regularly. 
+ +### Properties of L2 blocks + +The properties of an L2 block can be observed when using the `getBlock` method from our SDKs: + +| Parameter | Description | +|------------------|----------------------------------------------------------------------------------------| +| hash | The hash of the L2 block, null if pending | +| parentHash | Refers to the hash of the parent block in L2 | +| number | The current L2 block number, null if pending | +| timestamp | UNIX timestamp for when the L2 block was formed | +| nonce | Tracks the most recent transaction by the account's counter | +| difficulty | Always returns `2500000000000000` as zkSync does not use a proof of work consensus | +| gasLimit | Maximum gas allowed in this block, always returns `2^32-1` | +| gasUsed | Actual amount of gas used in this block | +| transactions | An array of transaction objects - see [TransactionResponse interface](/sdk/js/ethers/v6/providers#gettransaction) | +| baseFeePerGas | The base fee per gas in the style of EIP1559 | + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Block number and timestamp considerations**: +Recent protocol updates have changed some block properties on zkSync Era. More information is available on the [GitHub announcement](https://github.com/zkSync-Community-Hub/zkync-developers/discussions/87). +:: + +--- +## The role of L1 batches + +L1 batches are integral to zkSync Era as they represent the unit of computation for generating proofs. +From a VM perspective, each L1 batch is akin to executing a program—the Bootloader, which processes all transactions within the batch. + +### L1 batch size and processing times + +The sealing of a batch depends on several criteria, +managed by the [conditional_sealer](https://github.com/matter-labs/zksync-era/blob/main/core/lib/zksync_core/src/state_keeper/seal_criteria/conditional_sealer.rs) +module, including transaction count, size limits, gas limits, and more. 
+The decision-making process is complex, often requiring a "try and rollback" approach for transactions that exceed the batch's capacity. + +The `conditional_sealer` maintains a list of `SealCriterion` which include: + +- Transaction count limit (that is, how many transactions would fit within a batch). +- Transaction size limit (that is, the total data/information within the transactions). +- L2 Gas limit. +- Published data limit (as each L1 batch must publish information about the changed slots to L1, so all the changes must + fit within the L1 transaction limit). +- L1 Gas limit (Similar to the above, but ensuring the commit, prove and execute transactions on L1 wouldn't consume more gas than available). +- Circuits Geometry limits - For certain operations like merkle transformation, there is a maximum number of circuits that can be + included in a single L1 batch. If this limit is exceeded, we wouldn't be able to generate the proof. +- Timeout (unlikely to ever be used, but ensures if there are not enough transactions to seal based on the other criteria, + the batch is still sealed so information is sent to L1). + +However, these sealing criteria pose a significant challenge because it is difficult to predict in advance whether +adding a given transaction to the current batch will exceed the limits or not. This unpredictability adds complexity to +the process of determining when to seal the block. + +### `ExcludeAndSeal` + +To handle situations where a transaction exceeds the limits of the currently active L1 batch, +we employ a "try and rollback" approach. +This means that we attempt to add the transaction to the active L1 batch, +and if we receive a `ExcludeAndSeal` response indicating that it doesn't fit, +we roll back the virtual machine (VM) to the state before the transaction was attempted. + +Implementing this approach introduces a significant amount of complexity in the `oracles` (also known as interfaces) of the VM. 
+These oracles need to support snapshotting and rolling back operations to ensure consistency when handling transactions that don't fit. + +--- +## Retrieving block and batch numbers + +Accessing block and batch numbers in zkSync Era is straightforward: + +- `eth_blockNumber` retrieves the latest L2 block number. +- `eth_getBlockByNumber` provides details for a specific L2 block. +- `zks_L1BatchNumber` fetches the most recent batch number, critical for understanding the scope of transactions and operations within zkSync Era. + +--- +## Deeper dive into zkSync Era's batch and block mechanisms + +This section delves into the intricate processes involved in initializing and managing L1 batches and L2 blocks within zkSync Era, +providing insights into the technical frameworks and operational protocols. + +### Initializing L1 batch + +At the start of each L1 batch, the operator submits essential data such as the batch's timestamp, +its sequential number, and the hash of the previous batch. +The Merkle tree's root hash serves as the foundational root hash for the batch. +The SystemContext verifies these details immediately, ensuring consistency and integrity right from the initiation phase. +The underlying operations and consistency checks are detailed [here](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/SystemContext.sol#L416). + +### Processing and consistency checks of L2 blocks + +#### `setL2Block` + +Before processing each transaction, +the [`setL2Block` method](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L2605) +is invoked, configuring the necessary parameters for the L2 block that will contain the transaction. +There we will provide some data about the L2 block that the transaction belongs to: + +- `_l2BlockNumber` The number of the new L2 block. 
+- `_l2BlockTimestamp` The timestamp of the new L2 block.
+- `_expectedPrevL2BlockHash` The expected hash of the previous L2 block.
+- `_isFirstInBatch` Whether this method is called for the first time in the batch.
+- `_maxVirtualBlocksToCreate` The maximum number of virtual blocks to create with this L2 block.
+
+If two transactions belong to the same L2 block, only the first one may have non-zero `_maxVirtualBlocksToCreate`. The
+rest of the data must be the same.
+
+Detailed operations can be found [here](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/SystemContext.sol#L312).
+
+### L2 blockhash calculation and storage
+
+The hash for each L2 block is dynamically calculated using `keccak256`, encoding various block details and transaction hashes.
+This mechanism ensures that each block can be independently verified and traced within the L2 framework.
+
+The hash of an L2 block is
+`keccak256(abi.encode(_blockNumber, _blockTimestamp, _prevL2BlockHash, _blockTxsRollingHash))`.
+
+`_blockTxsRollingHash` is defined in the following way:
+
+`_blockTxsRollingHash = 0` for an empty block.
+
+`_blockTxsRollingHash = keccak(0, tx1_hash)` for a block with one tx.
+
+`_blockTxsRollingHash = keccak(keccak(0, tx1_hash), tx2_hash)` for a block with two txs, etc.
+
+To add a transaction hash to the current miniblock we use the `appendTransactionToCurrentL2Block`
+[function](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/SystemContext.sol#L373).
+
+Since zkSync is a state-diff based rollup, there is no way to deduce the hashes of the L2 blocks based on the transactions in the batch
+(because there is no access to the transactions’ hashes).
+At the same time, in order to serve the `blockhash` method, the VM requires the knowledge of some of the previous L2 block hashes. 
+In order to save up on pubdata (by making sure that the same storage slots are reused, i.e. we only have repeated writes) we store only the +[last 257 block hashes](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/SystemContext.sol#L70). +You can read more on what are the repeated writes and how the pubdata is processed +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md). + +### Legacy blockhash + +For blocks that predate certain system upgrades (migration upgrades), +the blockhash is generated using a simplified formula that incorporates only the block number. +This method ensures backward compatibility and integrity across different block versions within the zkSync Era system. + +We use the following formula for their hash: + +`keccak256(abi.encodePacked(uint32(_blockNumber)))` + +### Timing invariants + +zkSync Era maintains strict timing invariants to ensure that each block's timestamp is accurate and consistent relative to other system timestamps. + +These invariants include: + +- For each L2 block its timestamp should be > the timestamp of the previous L2 block +- For each L2 block its timestamp should be ≥ timestamp of the batch it belongs to +- Each batch must start with a new L2 block (i.e. an L2 block can not span across batches). +- The timestamp of a batch must be ≥ the timestamp of the latest L2 block which belonged to the previous batch. +- The timestamp of the last miniblock in batch can not go too far into the future. This is enforced by publishing an + L2→L1 log, with the timestamp which is then checked on L1. 
+ +### Finalization of batches with fictive L2 blocks + +At the end of each batch, a fictive L2 block is generated from [the bootloader](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3812) +to finalize the transactions and prepare for the next batch. +This block, typically empty, acts as a procedural step within the internal node operations. +This empty block contains a Transfer event log, representing the bootloader transferring the collected fees to the operator. +Additionally, the timestamps of the batch and the last miniblock are verified +against realistic expectations on L1 to ensure temporal consistency and prevent future discrepancies. diff --git a/content/10.zk-stack/05.concepts/20.fee-mechanism.md b/content/10.zk-stack/05.concepts/20.fee-mechanism.md new file mode 100644 index 00000000..7ae8b5ea --- /dev/null +++ b/content/10.zk-stack/05.concepts/20.fee-mechanism.md @@ -0,0 +1,508 @@ +--- +title: "zkSync Fee Mechanism" +description: "Understanding the fee mechanism in zkSync, including the influence of L1 gas prices on L2 transactions, and the unique pricing for batch overheads and operation costs." +--- + +## Introduction to zkSync's Fee Structure + +In Ethereum, computational and storage costs are quantified using gas, with specific gas costs for each operation, which may change during system [upgrades](https://blog.ethereum.org/2021/03/08/ethereum-berlin-upgrade-announcement). +However, zkSync and other Layer 2 solutions face challenges in adopting this model due to the necessity of publishing pubdata on Ethereum. +As a result, the cost of L2 transactions is tied to the fluctuating gas prices on L1 and cannot be hardcoded. + +--- +## Gas Per Pubdata Limit + +In zkSync, transaction costs are influenced by the volatile gas prices on L1, which are needed to publish pubdata, verify proofs, and more. 
+This is addressed in zkSync-specific EIP712 transactions which include a `gas_per_pubdata_limit` field, +indicating the maximum gas price the operator can charge users per byte of pubdata. +For Ethereum transactions lacking this field, the operator is restrained from exceeding a predefined constant value. + +--- +## Opcode Pricing Variance + +The complexity of zero-knowledge proof operations differs significantly from standard CPU operations. +For example, the `keccak256` operation, while optimized for CPU performance, +incurs higher costs in zero-knowledge systems due to its mathematical demands, leading to distinct pricing structures in zkSync compared to Ethereum. + +--- +## Intrinsic Costs in zkSync + +Unlike Ethereum, which uses a base intrinsic transaction cost to cover updates to user balances, nonce, and signature verifications, +zkSync does not include these costs in its intrinsic transaction pricing. +This stems from zkSync's support for account abstraction, +allowing different account types to potentially lower transaction costs through optimizations or more zk-friendly signature schemes. +The costs in zkSync primarily cover the intrinsic zero-knowledge proving costs, which are measured through testing and hardcoded due to their complexity. + +--- +## Understanding Batch Overhead + +zkSync incurs operational costs for proving each batch, referred to as "batch overhead," which includes: + +1. **L2 Costs**: These are the costs in L2 gas required for proving circuits. +2. **L1 Costs**: These cover proof verification and general batch processing on L1. + +The protocol aims to maximize transaction inclusion per batch to distribute these costs effectively. +Several factors determine when a batch is sealed, such as time constraints for user experience, +the transaction slot capacity of the bootloader, memory usage from transaction encoding, and pubdata bytes limitations, +which currently stand at 128kb per transaction due to node constraints. 
+ +In the case of zkSync batches, here are the resources the protocol watches to decide when a batch is sealed: + +1. **Time.** The same as on Ethereum, the batch should generally not take too much time to be closed in order to provide + better UX. To represent the time needed we use a batch gas limit, note that it is higher than the gas limit for a + single transaction. +1. **Slots for transactions.** The bootloader has a limited number of slots for transactions, i.e. it can not take more + than a certain transactions per batch. +1. **The memory of the bootloader.** The bootloader needs to store the transaction’s ABI encoding in its memory & this + fills it up. In practical terms, it serves as a penalty for having transactions with large calldata/signatures in case + of custom accounts. +1. **Pubdata bytes.** In order to fully appreciate the gains from the storage diffs, i.e. the fact that changes in a + single slot happening in the same batch need to be published only once, we need to publish all the batch’s public data + only after the transaction has been processed. Right now, we publish all the data with the storage diffs as well as + L2→L1 messages, etc in a single transaction at the end of the batch. Most nodes have limit of 128kb per transaction + and so this is the limit that each zkSync batch should adhere to. + +Each transaction spends the batch overhead proportionally to how much of these resources it requires. + +Note that before the transaction is executed, the system can not know how many of these limited system resources the transaction will actually take. +Therefore, we need to charge for the worst case and provide the [refund](#refunds) at the end of the +transaction. + +--- +## Base Fee and Gas Limits + +To safeguard against DDoS attacks, zkSync implements a `MAX_TRANSACTION_GAS_LIMIT`. 
+The `baseFee` reflects the real costs of computation for the proof, +and the `gas_per_pubdata_limit` must be set sufficiently high to cover the fees for the required L1 gas per byte of pubdata. +During periods of high L1 gas demand, adjustments to these limits ensure that transactions remain feasible without exceeding resource allocations. + +To make common transactions always executable, we must enforce that the users +are always able to send at least `GUARANTEED_PUBDATA_PER_TX` bytes of pubdata in their transaction. +Because of that, the needed `gas_per_pubdata_limit` for transactions should never grow beyond `MAX_TRANSACTION_GAS_LIMIT/GUARANTEED_PUBDATA_PER_TX`. + +Setting a hard bound on `gas_per_pubdata_limit` also means that with the growth of L1 gas prices, +the L2 `baseFee` will have to grow as well. +This ensures that `base_fee * gas_per_pubdata_limit = L1_gas_price * l1_gas_per_pubdata`. + +This mainly impacts computationally intensive tasks. +For these kinds of transactions, we need to conservatively charge a big upfront payment. +It is compensated with a refund at the end of the transaction for any overspent gas. + +--- +## Trusted Gas Limit + +For operations requiring extensive pubdata, such as deploying a new contract, the `MAX_TRANSACTION_GAS_LIMIT` may be exceeded. +This is accommodated by including the bytecode in a separate part of the pubdata, termed "factory dependencies," +allowing the operator to gauge the required gas expenditure accurately. + +to provide a better user experience for users, the operator may provide the +[trusted gas limit](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1137), +i.e. the limit which exceeds `MAX_TRANSACTION_GAS_LIMIT` assuming that the operator is +sure that the excess gas will be spent on the pubdata. + +--- +## Refunds + +Another distinctive feature of the fee model used on zkSync is the availability of refunds. 
+Refunds can be issued for unused limited system resources and overpaid computation. +This is needed because of the relatively big upfront payments required in zkSync to provide DDoS security. + +--- +## Formulas and constants for calculating fees + +After determining price for each opcode in gas according to the model above, +the following formulas are used for calculating `baseFee` and `gasPerPubdata` for a batch. + +### System-wide constants + +These constants are to be hardcoded and can only be changed via either system contracts/bootloader or VM upgrade. + +`BATCH_OVERHEAD_L1_GAS` (_L_1_O_)— The L1 gas overhead for a batch (proof verification, etc). + +`L1_GAS_PER_PUBDATA_BYTE` (_L_1_PUB_) — The number of L1 gas needed for a single pubdata byte. +It is slightly higher than 16 gas needed for publishing a non-zero byte of pubdata on-chain (currently the value of 17 is used). + +`BATCH_OVERHEAD_L2_GAS` (_EO_)— The constant overhead denominated in gas. This overhead is created to cover the +amortized costs of proving. + +`BLOCK_GAS_LIMIT` (_B_) — The maximum number of computation gas per batch. This is the maximal number of gas that can be +spent within a batch. This constant is rather arbitrary and is needed to prevent transactions from taking too much time +from the state keeper. It can not be larger than the hard limit of 2^32 of gas for VM. + +`MAX_TRANSACTION_GAS_LIMIT` (_TM_) — The maximal transaction gas limit. For _i_-th single instance circuit, the price of +each of its units is $SC_i = \lceil \frac{T_M}{CC_i} \rceil$ to ensure that no transaction can run out of these single +instance circuits. + +`MAX_TRANSACTIONS_IN_BATCH` (_TXM_) — The maximum number of transactions per batch. A constant in bootloader. Can +contain almost any arbitrary value depending on the capacity of batch that we want to have. + +`BOOTLOADER_MEMORY_FOR_TXS` (_BM_) — The size of the bootloader memory that is used for transaction encoding +(i.e. 
excluding the constant space, preallocated for other purposes). +
+`GUARANTEED_PUBDATA_PER_TX` (_PG_) — The guaranteed number of pubdata that should be possible to pay for in one zkSync
+batch. This is a number that should be enough for most reasonable cases.
+
+### Derived constants
+
+Some of the constants are derived from the system constants above:
+
+`MAX_GAS_PER_PUBDATA` (_EPMax_) — the `gas_price_per_pubdata` that should always be enough to cover for publishing a
+pubdata byte:
+
+$$
+EP_{Max} = \lfloor \frac{T_M}{P_G} \rfloor
+$$
+
+### Externally-Provided Batch Parameters
+
+`L1_GAS_PRICE` (_L_1_P_) — The price for L1 gas in ETH.
+
+`FAIR_GAS_PRICE` (_Ef_) — The “fair” gas price in ETH, that is, the price of proving one circuit (in Ether) divided by
+the number we chose as one circuit price in gas.
+
+$$
+E_f = \frac{Price_C}{E_C}
+$$
+
+where _PriceC_ is the price for proving a circuit in ETH. Even though this price will generally be volatile (due to the
+volatility of ETH price), the operator is discouraged from changing it often, because frequent changes would make both the gas
+price and (most importantly) the required `gas_price_per_pubdata` for transactions volatile.
+
+Both of the values above are currently provided by the operator. Later on, some decentralized/deterministic way to
+provide these prices will be utilized.
+
+### Determining `base_fee`
+
+When the batch opens, we can calculate the `FAIR_GAS_PER_PUBDATA_BYTE` (_EPf_) — “fair” gas per pubdata byte:
+
+$$
+EP_f = \lceil \frac{L1_p * L1_{PUB}}{E_f} \rceil
+$$
+
+There are now two situations that can be observed:
+
+I.
+
+$$
+ EP_f > EP_{Max}
+$$
+
+This means that the L1 gas price is so high that if we treated all the prices fairly, then the number of gas required to
+publish guaranteed pubdata is too high, i.e. 
allowing at least _PG_ pubdata bytes per transaction would mean that we
+would have to support _tx_._gasLimit_ greater than the maximum gas per transaction _TM_, making it possible to run out of other finite
+resources.
+
+If $EP_f > EP_{Max}$, then the user needs to artificially increase the provided _Ef_ to bring the needed
+_tx_._gasPerPubdataByte_ to _EPmax_.
+
+In this case we set the EIP1559 `baseFee` (_Base_):
+
+$$
+Base = max(E_f, \lceil \frac{L1_P * L1_{PUB}}{EP_{max}} \rceil)
+$$
+
+Only transactions that have at least this high gasPrice will be allowed into the batch.
+
+II.
+
+Otherwise, we keep $Base = E_f$.
+
+Note, that both cases are covered with the formula in case (1), i.e.:
+
+$$
+Base = max(E_f, \lceil \frac{L1_P * L1_{PUB}}{EP_{max}} \rceil)
+$$
+
+This is the base fee that will be always returned from the API via `eth_gasPrice`.
+
+### Overhead for a Transaction
+
+Let’s define _tx_._actualGasLimit_ as the actual gasLimit that is to be used for processing of the transaction
+(including the intrinsic costs). In this case, we will use the following formulas for calculating the upfront payment
+for the overhead:
+
+$$
+S_O = 1/TX_M
+$$
+
+$$
+M_O(tx) = encLen(tx) / B_M
+$$
+
+$$
+E_{AO}(tx) = tx.actualGasLimit / T_M
+$$
+
+$$
+O(tx) = max(S_O, M_O(tx), E_{AO}(tx))
+$$
+
+where:
+
+_SO_ — is the overhead for taking up 1 slot for a transaction
+
+_MO_(_tx_) — is the overhead for taking up the memory of the bootloader
+
+_encLen_(_tx_) — the length of the ABI encoding of the transaction’s struct.
+
+_EAO_(_tx_) — is the overhead for potentially taking up the gas for single instance circuits.
+
+_O_(_tx_) — is the total share of the overhead that the transaction should pay for. 
+
+Then we can calculate the overhead that the transaction should pay as the following one:
+
+$$
+L1_O(tx) = \lceil \frac{L1_O}{L1_{PUB}} \rceil * O(tx)
+$$
+$$
+E_O(tx) = E_O * O(tx)
+$$
+
+Where
+
+_L_1_O_(_tx_) — the number of L1 gas overhead (in pubdata equivalent) the transaction should compensate for gas.
+
+_EO_(_tx_) — the number of L2 gas overhead the transaction should compensate for.
+
+Then:
+
+_overhead_\__gas_(_tx_) = _EO_(_tx_) + _tx_._gasPerPubdata_ ⋅ _L_1_O_(_tx_)
+
+When a transaction is being estimated, the server returns the following gasLimit:
+
+_tx_._gasLimit_ = _tx_._actualGasLimit_ + _overhead_\__gas_(_tx_)
+
+Note, that when the operator receives the transaction, it knows only _tx_._gasLimit_. The operator could derive the
+_overhead_\__gas_(_tx_) and provide the bootloader with it. The bootloader will then derive
+_tx_._actualGasLimit_ = _tx_._gasLimit_ − _overhead_\__gas_(_tx_) and use the formulas above to derive the overhead that
+the user should’ve paid under the derived _tx_._actualGasLimit_ to ensure that the operator does not overcharge the
+user.
+
+### _overhead_(_tx_)
+
+For the ease of integer calculation, we will use the following formulas to derive the _overhead_(_tx_):
+
+$B_O(tx) = E_O + tx.gasPerPubdataByte \cdot \lfloor \frac{L1_O}{L1_{PUB}} \rfloor$
+
+$B_O$ denotes the overhead for batch in gas that the transaction would have to pay if it consumed the resources for
+entire batch.
+
+Then, _overhead_\__gas_(_tx_) is the maximum of the following expressions:
+
+1. $S_O = \lceil \frac{B_O}{TX_M} \rceil$
+2. $M_O(tx) = \lceil \frac{B_O \cdot encodingLen(tx)}{B_M} \rceil$
+3. 
$E_O(tx) = \lceil \frac{B_O \cdot tx.gasBodyLimit}{T_M} \rceil$
+
+### Deriving `overhead_gas(tx)` from `tx.gasLimit`
+
+The task that the operator needs to do is the following:
+
+Given the tx.gasLimit, it should find the maximal `overhead_gas(tx)`, such that the bootloader will accept such
+transaction, that is, if we denote by _Oop_ the overhead proposed by the operator, the following equation should hold:
+
+$$
+O_{op} \le \text{overhead\_gas}(tx)
+$$
+
+for the $tx.bodyGasLimit$ we use the $tx.bodyGasLimit$ = $tx.gasLimit − O_{op}$.
+
+There are a few approaches that could be taken here:
+
+- Binary search. However, we need to be able to use this formula for the L1 transactions too, which would mean that
+  binary search is too costly.
+- The analytical way. This is the way that we will use and it will allow us to find such an overhead in O(1), which is
+  acceptable for L1->L2 transactions.
+
+Let’s rewrite the formula above in the following way:
+
+$$
+O_{op} \le max(S_O, M_O(tx), E_O(tx))
+$$
+
+So we need to find the maximal $O_{op}$, such that $O_{op} ≤ max(S_O, M_O(tx), E_O(tx))$. Note, that it means ensuring
+that at least one of the following is true:
+
+1. $O_{op} ≤ S_O$
+2. $O_{op} ≤ M_O(tx)$
+3. $O_{op} ≤ E_O(tx)$
+
+So let’s find the largest _Oop_ for each of these and select the maximum one.
+
+- Solving for (1)
+
+$$
+O_{op} = \lceil \frac{B_O}{TX_M} \rceil
+$$
+
+- Solving for (2)
+
+$$
+O_{op} = \lceil \frac{encLen(tx) \cdot B_O}{B_M} \rceil
+$$
+
+- Solving for (3)
+
+This one is somewhat harder than the previous ones. 
We need to find the largest _O\_{op}_, such that: + +$$ +O_{op} \le \lceil \frac{tx.actualErgsLimit \cdot B_O}{T_M} \rceil +$$ + +$$ +O_{op} \le \lceil \frac{(tx.ergsLimit - O_{op}) \cdot B_O}{T_M} \rceil +$$ + +$$ +O_{op} ≤ \lceil \frac{B_O \cdot (tx.ergsLimit - O_{op})}{T_M} \rceil +$$ + +Note, that all numbers here are integers, so we can use the following substitution: + +$$ +O_{op} -1 \lt \frac{(tx.ergsLimit - O_{op}) \cdot B_O}{T_M} +$$ + +$$ +(O_{op} -1)T_M \lt (tx.ergsLimit - O_{op}) \cdot B_O +$$ + +$$ +O_{op} T_M + O_{op} B_O \lt tx.ergsLimit \cdot B_O + T_M +$$ + +$$ +O_{op} \lt \frac{tx.ergsLimit \cdot B_O + T_M}{B_O + T_M} +$$ + +Meaning, in other words: + +$$ +O_{op} = \lfloor \frac{tx.ergsLimit \cdot B_O + T_M - 1}{B_O + T_M} \rfloor +$$ + +Then, the operator can safely choose the largest one. + +### Discounts by the operator + +It is important to note that the formulas provided above are to withstand the worst-case scenario and these are the +formulas used for L1->L2 transactions (since these are forced to be processed by the operator). However, in reality, the +operator typically would want to reduce the overhead for users whenever it is possible. For instance, in the server, we +underestimate the maximal potential `MAX_GAS_PER_TRANSACTION`, since usually the batches are closed because of either +the pubdata limit or the transactions’ slots limit. For this reason, the operator also provides the operator’s proposed +overhead. The only thing that the bootloader checks is that this overhead is _not larger_ than the maximal required one. +But the operator is allowed to provide a lower overhead. + +### Refunds + +As you could see above, this fee model introduces quite some places where users may overpay for transactions: + +- For the pubdata when L1 gas price is too low +- For the computation when L1 gas price is too high +- The overhead, since the transaction may not use the entire batch resources they could. 
+
+To compensate users for this, we will provide refunds. For all of the refunds to be provable, the counter
+counts the number of gas that was spent on pubdata (or the number of pubdata bytes published). We will denote this
+number by _pubdataused_. For now, this value can be provided by the operator.
+
+The fair price for a transaction is
+
+$$
+FairFee = E_f \cdot tx.computationalGas + EP_f \cdot pubdataused
+$$
+
+We can derive $tx.computationalGas = gasspent − pubdataused \cdot tx.gasPricePerPubdata$, where _gasspent_ is the number
+of gas spent for the transaction (can be trivially fetched in Solidity).
+
+Also, the _FairFee_ will include the actual overhead for batch that the users should pay for.
+
+The fee that the user has actually spent is:
+
+$$
+ActualFee = gasspent \cdot gasPrice
+$$
+
+So we can derive the overpay as
+
+$$
+ActualFee − FairFee
+$$
+
+In order to keep the invariant of $gasUsed \cdot gasPrice = fee$ , we will formally refund
+$\frac{ActualFee - FairFee}{Base}$ gas.
+
+At the moment, this counter is not accessible within the VM and so the operator is free to provide any refund it wishes
+(as long as it is greater than or equal to the actual amount of gasLeft after the transaction execution).
+
+#### Refunds for repeated writes
+
+zkEVM is a state diff-based rollup, i.e. the pubdata is published not for transactions, but for storage changes.
+This means that whenever a user writes into a storage slot, the user incurs a certain amount of pubdata. However, not all writes are equal:
+
+- If a slot has been already written to in one of the previous batches, the slot has received a short id, which allows
+  it to require less pubdata in the state diff.
+- Depending on the `value` written into a slot, various compression optimizations could be used and so we should reflect
+  that too.
+- Maybe the slot has been already written to in this batch and so we don’t need to charge anything for it. 
+
+You can read more about how we treat the pubdata
+[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md).
+
+The important part here is that while such refunds are inlined (i.e. unlike the refunds for overhead they happen
+in-place during execution and not after the whole transaction has been processed), they are enforced by the operator.
+Right now, the operator is the one that decides what refund to provide.
+
+---
+## Improvements in the upcoming releases
+
+The fee model explained above, while fully functional, has some known issues. These will be tackled with the following
+upgrades.
+
+### The quadratic overhead for pubdata
+
+Note, that the computational overhead is proportional to the `tx.gasLimit` and the amount of funds the user will have to
+pay is proportional to the L1 gas price (recall the formula of `B_O`). We can roughly express the transaction overhead
+from computation as `tx.gasLimit * L1_GAS_PRICE * C` where `C` is just some constant. Note, that since a transaction
+typically contains some storage writes, and its
+`tx.gasLimit = gasSpentOnCompute + pubdataPublished * gasPricePerPubdata`, `tx.gasLimit` is roughly proportional to
+`gasPricePerPubdata` and so it is also proportional to `L1_GAS_PRICE`.
+
+This means that the formula `tx.gasLimit * L1_GAS_PRICE * C` becomes _quadratic_ in the `L1_GAS_PRICE`.
+
+### `gasUsed` depends on `gasLimit`
+
+While in general it shouldn’t be the case assuming the correct implementation of [refunds](#refunds), in practice it
+turned out that the formulas above, while robust, estimate for the worst case which can be very different from the
+average one. In order to improve the UX and reduce the overhead, the operator charges less for the execution overhead.
+However, as a compensation for the risk, it does not fully refund for it. 
+ +### L1->L2 transactions do not pay for their execution on L1 + +The `executeBatches` operation on L1 is executed in `O(N)` where N is the number of priority ops that we have in the +batch. Each executed priority operation will be popped and so it incurs cost for storage modifications. As of now, we do +not charge for it. + +--- +## zkEVM Fee Components (Revenue & Costs) + +- On-Chain L1 Costs + - L1 Commit Batches + - The commit batches transaction submits pubdata (which is the list of updated storage slots) to L1. The cost of a + commit transaction is calculated as `constant overhead + price of pubdata`. The `constant overhead` cost is evenly + distributed among L2 transactions in the L1 commit transaction, but only at higher transaction loads. As for the + `price of pubdata`, it is known how much pubdata each L2 transaction consumed, therefore, they are charged + directly for that. Multiple L1 batches can be included in a single commit transaction. + - L1 Prove Batches + - Once the off-chain proof is generated, it is submitted to L1 to make the rollup batch final. Currently, each proof + contains only one L1 batch. + - L1 Execute Batches + - The execute batches transaction processes L2 -> L1 messages and marks executed priority operations as such. + Multiple L1 batches can be included in a single execute transaction. + - L1 Finalize Withdrawals + - While not strictly part of the L1 fees, the cost to finalize L2 → L1 withdrawals are covered by Matter Labs. The + finalize withdrawals transaction processes user token withdrawals from zkEVM to Ethereum. Multiple L2 withdrawal + transactions are included in each finalize withdrawal transaction. +- On-Chain L2 Revenue + - L2 Transaction Fee + - This fee is what the user pays to complete a transaction on zkEVM. It is calculated as + `gasLimit x baseFeePerGas - refundedGas x baseFeePerGas`, or more simply, `gasUsed x baseFeePerGas`. 
+- Profit = L2 Revenue - L1 Costs - Off Chain Infrastructure Costs diff --git a/content/10.zk-stack/05.concepts/30.finality.md b/content/10.zk-stack/05.concepts/30.finality.md new file mode 100644 index 00000000..a84f88fa --- /dev/null +++ b/content/10.zk-stack/05.concepts/30.finality.md @@ -0,0 +1,46 @@ +--- +title: Finality +description: "Explore the concept of finality in blockchain systems and learn about the steps involved in achieving transaction settlement." +--- + +Finality in blockchain systems refers to the point at which a transaction is considered irreversible and permanently part of the blockchain. +This concept is crucial for ensuring transaction security and reliability. + +## Finality on Ethereum + +On Ethereum, finality is typically achieved after 2 [epochs](https://info.etherscan.com/epoch-in-ethereum/), +which translates to approximately 13 minutes under normal network conditions. +This duration allows for sufficient block confirmations to prevent reversals and ensure that transactions are settled securely. + +--- +## Finality on zkSync Era + +zkSync Era, as a Layer 2 (L2) rollup, ties its finality and security mechanisms to those of the underlying Layer 1 (L1) Ethereum chain. +The steps involved in reaching finality in zkSync Era include: + +1. **Batch Formation**: Transactions are collected and grouped into a batch. This step generally takes a few minutes. +2. **Batch Commitment**: The complete batch is committed to the Ethereum blockchain. +3. **Proof Generation**: A cryptographic proof that validates the entire batch is generated. This process typically takes about an hour. +4. **Proof Submission**: The generated proof is submitted to an Ethereum smart contract for verification. +5. **Batch Finalization**: The batch undergoes a final verification and is settled on Ethereum. + This step includes a delay of approximately 21 hours as a security measure during the alpha phase of zkSync Era. 
+ +Overall, the complete finality time for a transaction on zkSync Era is around 24 hours, aligning with the finality of the corresponding Ethereum block. + +::callout{icon="i-heroicons-information-circle" color="blue"} +Advancements in validity proof research are continuously being made, +promising potential reductions in proof generation times and, consequently, faster finality periods in the future. +:: + +--- +## Instant confirmations + +While the full finality process on zkSync Era can take up to 24 hours, transactions are treated with instant confirmation for user convenience: + +- **Immediate Transaction Display**: Once submitted, transactions are quickly shown in the user interface and API as unconfirmed. +- **Immediate Asset Usability**: Users can immediately utilize the transferred assets for further transactions, + which may even be included in the same zkSync Era batch. + +This feature enables a seamless user experience, +although more cautious users may opt to wait for the transaction to reach full finality +or pass any intermediate steps before fully trusting the transaction's completion. diff --git a/content/10.zk-stack/05.concepts/40.system-upgrades.md b/content/10.zk-stack/05.concepts/40.system-upgrades.md new file mode 100644 index 00000000..9e758cc8 --- /dev/null +++ b/content/10.zk-stack/05.concepts/40.system-upgrades.md @@ -0,0 +1,58 @@ +--- +title: System Upgrades +description: Explore the structured approach to system upgrades in zkSync Era, including the roles of different branches and the audit process to ensure security and reliability. +--- + +The [system contracts](https://github.com/matter-labs/era-contracts) at zkSync Era are pivotal for the functionality and security of the platform. +To ensure that these contracts meet the highest standards of security and reliability, a rigorous update and audit process is followed. +Here's a detailed breakdown of the system upgrade process for zkSync Era. 
+ +--- +## Main branch + +The `main` branch of the [system contracts repository](https://github.com/matter-labs/era-contracts/blob/main/README.md) +serves as the production-ready codebase. +It contains the latest, most stable version of the protocol that has passed through all required audits. +This branch represents the secure backbone of zkSync Era, ready for deployment. + +## Development branch + +The `dev` branch is where active development occurs. +All the latest changes and innovative updates are first pushed here. +Developers should base new pull requests for system contract changes on the +[dev branch](https://github.com/matter-labs/era-contracts/blob/dev/README.md) +to ensure all modifications align with the most recent developmental efforts. + +## Release branches + +When a new release is planned, a specific branch named `release-vX-<name>` is created off the `dev` branch. +This naming convention includes the version number (`X`) and a descriptive name for the release. +The release process is structured to maintain clarity and order, ensuring systematic updates: + +1. **Branch Creation**: Initiate from the `dev` branch. +2. **Pull Requests**: Target the new release branch towards `main` or a preceding release branch if the latter hasn't been merged into `main`. + +--- +## Audit and integration + +Once a release branch is ready, it undergoes a thorough audit. +Post-audit, any necessary changes identified during the audit are implemented to enhance security and performance. +After finalizing the audit adjustments: + +- The release branch is merged back into the `dev` branch to consolidate all updates. +- Following the finalization of the release, the `main` branch is updated to reflect the new changes, + ensuring it remains the definitive source for production deployment. +- To maintain consistency and incorporate non-auditable updates like documentation or scripts, + the `main` branch should periodically merge back into the `dev` branch. 
+ +## Handling unaudited code + +For updates that do not require an audit, such as documentation changes or script adjustments, direct merges into the `main` branch are acceptable. +This strategy helps in keeping the `main` branch up-to-date with all non-critical updates, which are then synchronized across other branches. + +--- +## Conclusion + +The structured upgrade process at zkSync Era not only ensures that system contracts are robust and secure +but also maintains a clear pathway from development to deployment. +This process underscores zkSync Era's commitment to reliability, security, and continuous improvement in its blockchain solutions. diff --git a/content/10.zk-stack/05.concepts/50.zk-chains.md b/content/10.zk-stack/05.concepts/50.zk-chains.md new file mode 100644 index 00000000..28f93f6f --- /dev/null +++ b/content/10.zk-stack/05.concepts/50.zk-chains.md @@ -0,0 +1,299 @@ +--- +title: ZK Chains +description: "Delve into the concept of ZK Chains and their integral role in scaling blockchain systems like Ethereum, ensuring a future of efficient, global on-chain activities." +--- + +The need for blockchain scalability is paramount as networks like Ethereum, currently limited to processing about 12 transactions per second, +strive to handle millions of transactions to support global financial activities on-chain. +While various architectures like Polkadot, Cosmos, Near, and Eth 2.0 explore solutions like multi-chain or shard structures, issues with trust persist. +However, zero-knowledge proofs have emerged as a promising solution to these challenges, +offering cryptographic security when combined with data availability layers and ZK Rollups, thereby enhancing the scalability and security of Ethereum. + +## What are ZK Chains? + +ZK Chains represent a sophisticated layer of blockchain architecture, +consisting of parallel-running instances of zkEVM that achieve consensus and finality on Ethereum's Layer 1 (L1). 
+Inspired by the concept of hyperlinks in the traditional web, which connect various webpages, +ZK Chains utilize Hyperbridges to connect different rollups within the ecosystem, facilitating seamless interactions across chains. + +![hyperbridges](/images/zk-stack/hyperbridges.png) +**Gray lines show proofs, orange lines the hyperbridges, which automatically connect all blue chains.** + +### Structure and functionality + +ZK Chains operate with a shared bridge contract on Ethereum's L1 and include native bridges between individual rollups, +enhancing the overall interoperability and efficiency of the network. Key features of ZK Chains include: + +1. **Trustless Validating Bridges**: Ensures that rollups within the ZK Chain are interconnected without requiring additional trust layers. +2. **Asset Transfers**: Hyperbridges facilitate the easy transfer of assets, including burning and minting mechanisms, across the ecosystem. +3. **Unified Governance**: Leveraging a shared governance framework on L1, + the ecosystem can coordinate updates or respond collectively to vulnerabilities, much like a traditional blockchain network would handle a fork. +4. **Security and Trust**: All ZK Chains must utilize the standardized zkEVM engine to maintain consistent security and operational standards, + ensuring that trust and security are derived directly from L1. + +### Development and Deployment + +ZK Chains can be developed and deployed by anyone, fostering a diverse and open ecosystem. +However, for a ZK Chain to remain trusted and fully interoperable within the network, it must utilize the zkEVM engine that powers the ZK Stack. +This requirement ensures consistency in execution and security across different instances of ZK Chains. + +### Modular Implementation + +ZK Chains are designed to be modular, meaning developers can select different components of their blockchain systems or implement their own, +with the exception of the zkEVM core. 
+This modular approach allows for customization and flexibility in blockchain development +while maintaining core standards necessary for network security and interoperability. + +--- +## How Hyperbridges work + +Hyperbridges are composed of smart contracts that verify transactions across chains using Merkle proofs. +The process involves locking the original asset in a shared L1 bridge contract, unifying liquidity across the network, and follows these steps: + +1. **Initiation**: A transaction is initiated on a ZK Chain, aimed at crossing to another chain within the ecosystem. +2. **Settlement on L1**: The sending ZK Chain compiles a cryptographic proof of the transaction and settles it onto Ethereum's Layer 1, + anchoring the transaction's validity. +3. **Transaction Root Update**: Ethereum's Layer 1 updates the Transaction Root, a cumulative record reflecting all transactions processed across the ecosystem. +4. **Root Importation**: The receiving ZK Chain imports this updated Transaction Root through its consensus mechanism, + akin to the way Layer 1 to Layer 2 messages are currently handled. +5. **Transaction Submission**: A relayer submits the transaction along with a Merkle Proof to the receiving ZK Chain. + This proof connects the transaction to the newly updated Transaction Root. +6. **Verification and Execution**: The receiving ZK Chain verifies the transaction against the Transaction Root. + If the verification is successful, the transaction is executed, and the relayer is compensated for their service. +7. **Proof Settlement**: Finally, the receiving ZK Chain settles its proof on L1, conclusively validating the transaction within the ecosystem. + +![hyperscaling](/images/zk-stack/hyperscalingBridgingFull.png) + +#### Types of Bridges in the ZK Chain Ecosystem + +- **L1-L2 Bridges**: These bridges are foundational, facilitating direct interactions between Ethereum's main chain (L1) and second-layer solutions (L2). 
+- **zkPorter Shard Bridges**: Specifically designed for developers, these bridges connect different shards of the zkPorter virtual machine. + They are atomic and asynchronous, ensuring seamless operations akin to traditional blockchain interactions. +- **Hyperbridges**: Similar in function to L2 to L1 bridges, Hyperbridges are asynchronous and not atomic. + They leverage Account Abstraction and the services of external relayers to simplify the user experience, + making cross-chain interactions feel as straightforward as moving from L1 to L2. + +### Enhanced user experience + +Hyperbridges enhance the blockchain user experience by abstracting complex cross-chain interactions. +Users do not need to manually initiate calls on the destination chain, +thanks to the automation provided by Account Abstraction and the efficiency of external relayers. +This setup minimizes transaction fees and reduces the complexity typically associated with cross-chain movements. + +#### Simplified Cross-Chain Transactions + +Hyperbridges utilize Account Abstraction and external relayers to automate the process of initiating calls on destination chains. +This automation means that users do not need to manually manage the technical details of cross-chain transactions. Here’s how this enhances the user experience: + +- **Reduced Complexity**: Users interact with a seamless interface that hides the underlying complexities of blockchain operations. +- **Lower Fees**: By leveraging efficient relayers and minimizing manual operations, + transaction costs are kept low, akin to standard gas fees within a single chain. + +#### Unified Asset Management + +In a ZK Chain environment, users' wallets will display all of their assets across various chains in a unified interface. +Here’s what this integration looks like: + +- **Asset Bridging**: Relayers manage the process of bridging assets between chains, + handling the necessary burning and minting of assets as they move across the ecosystem. 
+- **Intuitive Addressing**: ZK Chains feature unique identifiers that integrate with the Ethereum Name Service (ENS),
+  making recipient addresses as straightforward as email addresses.
+  While users can still use traditional Ethereum addresses, the combination with ZK Chain identifiers simplifies transactions further.
+
+#### Protocol-Integrated Bridging
+
+Bridging is integrated directly into the transaction protocols of wallets, streamlining the process alongside standard asset transfers.
+Key aspects of this integration include:
+
+- **Quick Settlement Times**: The time taken for bridging transactions depends on the proof settlement time of the specific ZK Chain,
+  typically ranging from 1 to 15 minutes.
+- **Minimal Infrastructure Needs**: With relayers being the primary external infrastructure, the overall system remains lightweight and cost-effective.
+
+#### Real-World Application: Cross ZK Chain Uniswap Transaction
+
+Consider a practical scenario where you want to swap Ethereum for DAI using a cross ZK Chain transaction on Uniswap:
+
+1. **Transaction Initiation**: You initiate the transaction directly from your wallet.
+2. **Relayer Involvement**: A relayer picks up your Ethereum and deposits it into the Uniswap chain.
+3. **Asset Swap**: On the Uniswap chain, your Ethereum is automatically swapped for DAI.
+4. **Completion and Return**: The relayer then transfers the DAI back to your original chain.
+
+This entire process is executed as a single transaction, making it feel as seamless as if no chain-switching occurred.
+The only difference a user might notice is a slightly longer confirmation time, depending on the specific ZK Chain used.
+
+![hyperscalingUniswap](/images/zk-stack/hyperscalingUniswap.png)
+
+When setting up wallets on cheaper chains using scaling solutions like [validium](https://ethereum.org/en/developers/docs/scaling/validium/),
+users will have to trust the hosting organization to not lose their funds.
+
+Although the funds held in validiums are secure against theft, they can be frozen if the data becomes unavailable.
+This scenario would not only lock users out of their assets but also potentially damage the reputation and operational status of the hosting organization.
+
+---
+## Proof Aggregation
+
+Proof aggregation is a critical component in scaling blockchain technologies,
+allowing for the efficient verification of transactions across multiple chains.
+This process enhances the hyperscalability of the ecosystem,
+vital for supporting extensive blockchain operations without overwhelming the base layer (L1).
+Below, we explore the various methods of proof aggregation within the ZK Chain ecosystem and their implications.
+
+### Simple proof aggregation
+
+Simple proof aggregation treats each ZK Chain's proofs as independent entities that are verified collectively on Ethereum L1.
+This method is straightforward but has limitations:
+
+- **Infrequent Settlements**: To conserve on gas fees, proofs are settled less frequently, which can delay the verification process.
+- **Limited Fast Messaging**: The infrequent settlements restrict the ability for rapid communication between chains,
+  potentially slowing down cross-chain interactions.
+
+![Simple Proof Aggregation](/images/zk-stack/hyperscalingAggregation.png)
+
+### L3s: Layered proof aggregation
+
+In this model, ZK Chains can act as Layer 3 (L3) networks that settle their proofs onto an intermediary Layer 2 (L2) ZK Chain.
+This structure allows for several benefits and drawbacks:
+
+- **Faster Inter-L3 Messaging**: L3s on the same L2 can communicate more swiftly and cheaply.
+- **Atomic Transactions**: Transactions across L3s can be made atomic through the L2, enhancing transaction reliability.
+- **Increased Reversion Risk**: If the L2 faces issues or needs to revert, all dependent L3s could be affected.
+
+This solution is ultimately not scalable, as the L2's VM will be a bottleneck for proof verification,
+since the VM requires a full consensus mechanism, meaning long-term storage, transaction verification, etc.
+
+![L3 Fast Block Aggregation](/images/zk-stack/hyperscalingL3Fast2Blocks.png)
+
+### Layered Aggregation
+
+Combining the benefits of L3s with simple proof aggregation,
+this method uses a minimal program on L2 designed specifically for running L3 messaging and proof aggregation:
+
+- **Scalable and Efficient**: By focusing solely on essential functionalities, this model is more scalable than a full L2 VM.
+- **Maintains Light Consensus**: Only a lightweight consensus mechanism is needed, reducing the computational overhead.
+
+![Layered Aggregation](/images/zk-stack/hyperscalingLayeredAggregation.png)
+
+### Economic Guarantees
+
+To address the need for quicker interoperability, economic guarantees can be employed,
+allowing transaction roots to be calculated outside of the proof and imported ahead of proof verification:
+
+- **Optional Add-On**: This method can be added to systems that need faster transaction finality but comes with increased risks.
+  - This add-on can only work for L3s and Layered Aggregators.
+- **Risk of Reversion**: If an invalid transaction is included, all interconnected rollups might need to revert, as generating valid proofs would be impossible.
+
+![Fast Economic Guarantees](/images/zk-stack/hyperscalingFastEconomic.png)
+
+### Sovereignty
+
+ZK Chains retain sovereignty, meaning they can opt in or out of proof aggregation:
+
+- **Optional Participation**: ZK Chains may choose not to participate in aggregation,
+  opting instead to settle directly to Ethereum, albeit at higher costs.
+- **Decentralized Aggregation Access**: Aggregation remains accessible and decentralized, ensuring low hardware requirements for provers.
+ +![ZK Chain Sovereignty](/images/zk-stack/hyperscalingSovereignty.png) + +### Feature comparison + +Different aggregation methods offer various advantages and considerations: + +| | Aggregation | L3s | Layered Aggregation | +| ------------------------ | ----------- | ----------------- | --------------------- | +| Fast Messaging | No | Yes | Yes | +| Scales | Yes | No | Yes | +| Consensus Mechanism | None | L2 Full Consensus | Lightweight Consensus | +| Instant Messaging Add-on | No | Yes | Yes | +| Sovereign | Yes | Yes | Yes | + +--- +## Modularity: ZK Chain customization + +The ZK Stack offers a wide array of customization options for developers looking to tailor ZK Chain to specific needs +or create entirely new blockchain architectures. +This modular approach allows for significant flexibility in configuring transaction sequencing, data availability policies, and privacy features. +Below, we explore these customization options in detail. + +### Sequencing transactions + +- **Centralized sequencer** - Utilizes a single operator to quickly confirm transactions, + ideal for high-frequency trading (HFT) but requires trust in the operator’s reliability and integrity. +- **Decentralized sequencer** - Employs a consensus algorithm to determine transaction inclusion, + enhancing security and decentralization but potentially at the cost of higher latency. + It can be any algorithm, so developers can reuse existing implementations (e.g. Tendermint or HotStuff with permissionless dPoS). +- **Priority queue** - Allows transactions to be submitted directly via an L2 or L1 priority queue, + enhancing censorship resistance, particularly useful for governance protocols. + It’s worth noting that the priority queue will always be available as an escape-hatch mechanism + (even if a centralized or decentralized sequencer is employed), to protect users against censorship by a malicious sequencer. 
+- **External protocol** - Offers freedom to integrate any external sequencing protocols,
+  providing further flexibility and potential integration with existing systems.
+  External protocols such as Shared Sequencers and Shared Builders can be used.
+
+### Data Availability (DA)
+
+Data Availability (DA) is a critical component in ensuring the security and functionality of ZK Chains.
+It governs how transaction data is managed and made accessible, impacting everything from user privacy to transaction speed and cost.
+Below, we detail the various DA options available to developers using the ZK Stack, each tailored for specific security, privacy, and scalability needs.
+
+#### zk-Rollup
+zk-Rollup is the recommended DA policy for most ZK Chains.
+It ensures that the values of every changed storage slot are published as calldata (or blobs, depending on what's
+cheaper) on Ethereum's Layer 1 (L1). This approach benefits from:
+
+- **Amortization of Costs**: Changes that net to zero are not posted, reducing unnecessary data and saving costs.
+- **Inherited Security**: Adopts the full security and censorship-resistance properties of Ethereum, providing robust protection against potential attacks.
+
+#### zkPorter
+zkPorter is detailed extensively in [this informative post](https://blog.matter-labs.io/zkporter-a-breakthrough-in-l2-scaling-ed5e48842fbf).
+Key aspects include:
+
+- **Cost Efficiency**: Designed for users seeking lower transaction costs, potentially at the expense of higher security risks.
+- **Guardian Networks**: Developers can utilize the zkSync main zkPorter implementation,
+establish their own guardian network, or integrate external DA solutions like EigenDA.
+
+#### Validium
+Validium offers a privacy-oriented solution ideal for enterprise applications that require both auditability and confidentiality.
+Its key characteristics are: + +- **Controlled DA**: The hosting organization controls data availability, which can easily be restricted to maintain privacy. +- **Simpler Implementation**: As a simpler variant of zkPorter, Validium allows for straightforward deployment +but is generally discouraged for mainstream use due to its trust assumptions. + +#### Based zkRollup +Based zkRollup requires the publication of full transaction inputs instead of just final storage updates, +resembling the DA approach used in optimistic rollups: + +- **State Reconstruction**: Allows for trustless state verification and reconstruction, similar to optimistic rollups but with the benefits of zkRollups. +- **Application Specific**: Best suited for chains where transactions may trigger extensive data changes, such as those involving complex financial simulations. + +#### zkRollup (Self-hosted) +zkRollup (Self-hosted) represents an innovative approach where users manage their own data: + +- **User-hosted Data**: Users store all relevant data for their accounts, significantly enhancing privacy and reducing on-chain data requirements. +- **Minimal Data Footprint**: Potentially reduces the data footprint to as little as 5 bytes per user interaction, drastically scaling potential. +- **Complex Implementation**: While offering tremendous benefits, +this option requires sophisticated technical solutions to manage user interactions smoothly and securely. + +### Logical state partitions in ZK Porters + +Logical state partitions within ZK Porters offer a powerful way for ZK Chain +to manage and interact with distinct subsets of their state in a synchronized manner. +This modular architecture not only increases the efficiency and scalability of operations but also introduces advanced functionalities +such as atomic transactions and state interoperability between partitions. 
+ +Synchronicity is important as it enables atomic transactions between partitions, unlocking several unique use cases: + +- Transparently reading the state of another partition. +- Using flash loans between the partitions. + +One prominent example of this is a combination of **[zkRollup + zkPorter](https://blog.matter-labs.io/zkporter-a-breakthrough-in-l2-scaling-ed5e48842fbf)**: + +![hyperscalingZKPorter](/images/zk-stack/hyperscalingZKPorter.png) + +### Privacy + +ZK Chains support various methods to enhance privacy: + +- **Validium Mode**: Naturally provides privacy as long as the data is kept confidential by the operator. +- **Privacy Protocols**: Specialized L3 protocols like Aztec or Tornado can be integrated to provide user-level privacy + while benefiting from zkSync Era’s features like account abstraction. +- **Self-hosted Rollups**: Represent a long-term solution for privacy and scalability, where users manage their data and confirm state transitions off-chain. diff --git a/content/10.zk-stack/05.concepts/60.data-availability/00.index.md b/content/10.zk-stack/05.concepts/60.data-availability/00.index.md new file mode 100644 index 00000000..27517619 --- /dev/null +++ b/content/10.zk-stack/05.concepts/60.data-availability/00.index.md @@ -0,0 +1,88 @@ +--- +title: Overview +description: An in-depth look at how zkSync ensures data availability through state diffs and compresses data to optimize L1 submissions, plus tools for reconstructing L2 state from L1 public data. +--- + +Data availability is a cornerstone of zkSync's architecture, +ensuring that the entire Layer 2 (L2) state can be +[reconstructed](https://github.com/matter-labs/zksync-era/blob/main/docs/specs/data_availability/reconstruction.md) +from the data submitted to Ethereum's Layer 1 (L1). +This process not only secures the network but also optimizes cost-efficiency through innovative data management techniques. 
+ +## State diffs: Optimizing storage slots + +Instead of submitting detailed transaction data, zkSync focuses on posting **state diffs** to L1. +These diffs represent changes in the blockchain's state, enabling zkSync to efficiently manage how data is stored and referenced: + +- **Efficient Use of Storage Slots**: Changes to the same storage slots across multiple transactions can be grouped, + reducing the amount of data that needs to be sent to L1 and thereby lowering gas costs. +- **Compression Techniques**: All data sent to L1, including state diffs, is compressed to further reduce costs. + [Read more about zkSync's compression methods](https://github.com/matter-labs/zksync-era/blob/main/docs/specs/data_availability/compression.md). + +## Additional data posted to L1 + +In addition to state diffs, zkSync also posts other crucial information to ensure comprehensive data availability: + +- **L2 to L1 Logs and Messages**: These ensure that communications and events are recorded and accessible. +- **Published Bytecodes**: The bytecodes of deployed smart contracts are made available, crucial for contract interaction and verification. +- **Compressed State Diffs**: Further optimizes data management by reducing the size of state changes posted to L1. + +## Validiums and zkPorter: Balancing Security and Efficiency + +When a chain opts not to post its data on-chain, it operates under a model known as a **validium**. +This approach significantly reduces costs by keeping data off-chain but introduces risks related to data accessibility and security: + +- **zkPorter**: A hybrid model that combines features of rollups and validiums, zkPorter segments data responsibilities, +allowing some storage slots to remain off-chain while critical data is posted to L1. +[Learn more about zkPorter](https://github.com/matter-labs/zksync-era/blob/main/docs/specs/data_availability/validium_zk_porter.md). 
+ +## Recreating L2 State From L1 Pubdata + +zkSync provides tools to validate and reconstruct the L2 state from data available on L1. Here's how this process is typically managed: + +## Basic Flow + +1. First, we need to filter all of the transactions to the L1 zkSync contract for only the `commitBlocks` transactions +where the proposed block has been referenced by a corresponding `executeBlocks` call +(the reason for this is that a committed or even proven block can be reverted but an executed one cannot). + +2. Once we have all the committed blocks that have been executed, we then will pull the transaction input and the relevant fields. +The kinds of pubdata we’ll pull from transaction data: + - L2 to L1 Logs + - L2 to L1 Messages + - Published Bytecodes + - Compressed State Diffs + +## Key components for state reconstruction + +### State Diffs + +State diffs are essential for understanding changes within the blockchain's state, represented as key-value pairs: + +```text +naive way: ( storage_slot, address, value ) +actual: ( derived_key, value ) +compressed: ( derived_key or enumeration index, compressed_value ) +``` + +- **Format**: Typically presented as `(derived_key, value)`, +where `derived_key` is a hash of the storage slot and address, and `value` represents the storage value. +- **Compression and Enumeration**: After the initial post, `derived_key` can be replaced with an enumeration index to optimize data size. +The deeper meaning is that an enumeration key is the leaf index in our storage Merkle tree. + +### Contract Bytecodes + +The handling of contract bytecodes involves: + +- **Compression and Indexing**: Opcodes are chunked, indexed, and compressed by the server-side operator before being verified and sent to L1. +- **Verification and Storage**: A system contract ensures the accuracy of the compression before submission, +with uncompressed bytecode hashes stored in `AccountStorage` for reference. 
+
+This process is split into 2 different parts:
+
+- [the server side operator](https://github.com/matter-labs/zksync-era/blob/main/core/lib/utils/src/bytecode.rs#L33) handling the compression
+- [the system contract](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/Compressor.sol#L42)
+verifying that the compression is correct before sending to L1.
+
+The compressed bytecode makes its way up through `factoryDeps` and the hash of uncompressed bytecode is stored on the `AccountStorage` contract
+so the hash of the uncompressed bytecode will be part of the state diffs.
diff --git a/content/10.zk-stack/05.concepts/60.data-availability/10.recreate-l2-state-from-l1-state-diffs.md b/content/10.zk-stack/05.concepts/60.data-availability/10.recreate-l2-state-from-l1-state-diffs.md
new file mode 100644
index 00000000..9c05c271
--- /dev/null
+++ b/content/10.zk-stack/05.concepts/60.data-availability/10.recreate-l2-state-from-l1-state-diffs.md
@@ -0,0 +1,45 @@
+---
+title: Recreating L2 state from L1 pubdata
+---
+
+## Prerequisites
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+Note: Running the L1 pubdata tool will require approximately 500GB of memory and will take a few days to complete.
+::
+
+This tool is written in nightly Rust.
+You can install Rust by following the official instructions [here](https://www.rust-lang.org/learn/get-started),
+and then running the following command to switch to the nightly toolchain:
+
+```bash
+rustup toolchain install nightly
+```
+
+Clone the [zksync-state-reconstruct](https://github.com/eqlabs/zksync-state-reconstruct) tool:
+
+```bash
+git clone https://github.com/eqlabs/zksync-state-reconstruct
+cd zksync-state-reconstruct
+```
+
+## Usage
+
+To start reconstructing the state, run the following command with any valid HTTP/HTTPS Ethereum JSON-RPC endpoint, for example using `https://eth.llamarpc.com`.
+ +You can also use an Ethereum endpoint from [Alchemy,](https://www.alchemy.com/) [Infura](https://www.infura.io/) or any other endpoint provider: + +```bash +cargo +nightly run -- reconstruct l1 --http-url https://eth.llamarpc.com +``` + +Here's what a successful start of the tool will look like in your terminal: + +![L2 state reconstruction start](/images/zk-stack/l2-state-start.png) + +Here's what the tool will look like as it's running: + +![L2 state reconstruction running](/images/zk-stack/l2-state-running.png) + +Once the tool has finished syncing, you will only be warned if the published L2 state is not the same as the reconstructed L2 state. +If the tool has finished syncing and you have not received any warnings, then the L2 state has been successfully reconstructed. diff --git a/content/10.zk-stack/05.concepts/60.data-availability/20.validiums.md b/content/10.zk-stack/05.concepts/60.data-availability/20.validiums.md new file mode 100644 index 00000000..b1d68dd1 --- /dev/null +++ b/content/10.zk-stack/05.concepts/60.data-availability/20.validiums.md @@ -0,0 +1,60 @@ +--- +title: Validiums +description: Explore the concept of Validiums as a blockchain scaling solution, examining their advantages, risks, and potential use cases, especially in privacy-focused and enterprise applications. +--- + +Validiums represent an innovative approach to scaling blockchain technology by optimizing data availability and computation processes. +This section provides an overview of what Validiums are, their advantages, inherent risks, and potential applications. + +### What is a Validium? + +Validiums are specialized blockchain scaling solutions that leverage off-chain data availability +combined with on-chain validity proofs to enhance throughput and efficiency. 
+The core characteristics of Validiums include: + +- **Off-Chain Data Availability**: Unlike zk-rollups that store data on-chain, Validiums keep transaction data off-chain, +which can significantly reduce gas costs and enhance privacy. +- **Validity Proofs on Ethereum**: Validiums maintain their integrity and security by verifying state transitions through validity proofs on Ethereum, +ensuring accurate and trustworthy operations. + +### Advantages of Validiums + +Validiums offer several benefits that make them appealing for specific blockchain applications: + +- **Reduced Gas Costs**: By not publishing all transaction data to Ethereum's Layer 1 (L1), Validiums can operate with lower transaction fees. +- **Enhanced Privacy**: The off-chain handling of data allows Validiums to offer enhanced privacy features, +making them suitable for applications where data confidentiality is paramount. +- **Fast Withdrawals**: The ability to verify validity proofs quickly on Ethereum facilitates near-instant withdrawals, improving user experience. + +### Drawbacks and Risks + +Despite their advantages, Validiums also carry certain risks and drawbacks that must be considered: + +- **Data Availability Risks**: There is a potential risk of data being withheld by the parties responsible for its storage, +which could prevent users from accessing their funds. +- **Dependence on Trust and Economic Incentives**: Unlike zk-rollups, the security model of Validiums relies partially on the trustworthiness +and economic incentives of the data custodians, rather than purely on cryptographic proofs. + +### Potential Use Cases + +#### Enterprise Solutions + +Validiums are particularly well-suited for enterprise applications that require a balance between auditability and privacy: + +- **Auditability and Control**: Enterprises can manage the availability of their data, ensuring compliance with internal audits and regulations. 
+- **Privacy**: By controlling who has access to transaction data, enterprises can protect sensitive information from unauthorized access. + +#### Privacy Applications + +For platforms that prioritize user privacy, Validiums offer an effective solution: + +- **Confidential Transactions**: Users can execute transactions without exposing sensitive data on a public blockchain. +- **Enhanced Security for Private Data**: Enterprises and individuals can maintain a higher level of security and control over their transaction details. + +### Further Resources + +To explore more about Validiums and their role in blockchain scalability, the following resources provide additional insights and detailed analyses: + +- [Ethereum.org - Validium](https://ethereum.org/en/developers/docs/scaling/validium/) +- [DeFi Pulse - Rollups, Validiums, and Volitions](https://www.defipulse.com/blog/rollups-validiums-and-volitions-learn-about-the-hottest-ethereum-scaling-solutions) +- [Coda - Web3 One Pager on Validium](https://coda.io/@enzo/web3-one-pager/validium-42) diff --git a/content/10.zk-stack/05.concepts/60.data-availability/_dir.yml b/content/10.zk-stack/05.concepts/60.data-availability/_dir.yml new file mode 100644 index 00000000..9ea5693a --- /dev/null +++ b/content/10.zk-stack/05.concepts/60.data-availability/_dir.yml @@ -0,0 +1 @@ +title: Data Availability diff --git a/content/10.zk-stack/05.concepts/99.account-abstraction.md b/content/10.zk-stack/05.concepts/99.account-abstraction.md new file mode 100644 index 00000000..89f04e02 --- /dev/null +++ b/content/10.zk-stack/05.concepts/99.account-abstraction.md @@ -0,0 +1,47 @@ +--- +title: Account Abstraction +description: Explore the nuances of account abstraction in zkSync, including account versioning, nonce ordering, and the significance of returned magic values in transaction validation. 
+--- + +Account abstraction (AA) is a pivotal feature in zkSync that allows for greater flexibility and +functionality in how accounts operate and interact with transactions. + +For a comprehensive understanding, it is recommended to review +the detailed [AA protocol documentation](/build/developer-reference/account-abstraction). + +### Account versioning + +Account versioning in zkSync facilitates future updates and changes to the account abstraction protocol +by allowing accounts to specify which version of the protocol they support. +This capability ensures backward compatibility and supports evolutionary development without disrupting existing account functionalities. +The versions supported currently are: + +- `None`: Indicates a basic contract that is not designed to initiate transactions. +Such accounts are passive and should not be used as the `from` field in a transaction. +- `Version1`: Supports the initial version of the account abstraction protocol, incorporating all foundational features and specifications. + +### Nonce ordering + +Nonce ordering is a critical aspect that dictates how transactions from accounts are processed concerning their sequence. +Accounts in zkSync can choose between two types of nonce ordering: + +- `Sequential`: Nonces must follow a strict sequential order similar to externally owned accounts (EOAs). +This setting ensures that transactions are processed in a specific sequence, requiring transaction `X` to be processed before transaction `X+1`. +- `Arbitrary`: Allows transactions to be processed in any order, regardless of their nonce values. +While this mode offers flexibility, it currently may lead to transactions being rejected or stuck in the mempool due to nonce mismatches. + +It's important to note that these settings serve as guidelines to the operator for managing the mempool and are not enforced by system contracts. +However, they provide a framework for expected transaction handling. 
+ +### Returned magic value + +Both accounts and paymasters in zkSync are required to return a specific magic value during transaction validation. +This magic value is critical for ensuring the integrity and correctness of transactions on the mainnet: + +- **Validation Enforcement**: On the mainnet, the magic value is strictly enforced to confirm that transactions meet all protocol criteria before execution. +- **Fee Estimation**: During fee estimation, the magic value is not considered, +focusing solely on the computational costs associated with executing the transaction. + +To aid in accurate fee estimations, accounts are advised to execute all operations during validation as they would under normal circumstances +but return an invalid magic value only at the end of the validation process. +This approach helps in approximating the transaction's cost more accurately. diff --git a/content/10.zk-stack/05.concepts/99.l1_l2_communication.md b/content/10.zk-stack/05.concepts/99.l1_l2_communication.md new file mode 100644 index 00000000..69205f1a --- /dev/null +++ b/content/10.zk-stack/05.concepts/99.l1_l2_communication.md @@ -0,0 +1,264 @@ +--- +title: L1 <-> L2 Communication +description: +--- + +## Deposits and Withdrawals + +The zkEVM supports general message passing for L1<->L2 communication. +Proofs are settled on L1, so core of this process is the [L2->L1] message passing process. +[L1->L2] messages are recorded on L1 inside a priority queue, the sequencer picks it up from here and executes it in the zkEVM. +The zkEVM sends an L2->L1 message of the L1 transactions that it processed, +and the rollup's proof is only valid if the processed transactions were exactly right. + +There is an asymmetry in the two directions however, +in the L1->L2 direction we support starting message calls by having a special transaction type called L1 transactions. +In the L2->L1 direction we only support message passing. 
+ +In particular, deposits and withdrawals of ether also use the above methods. +For deposits the L1->L2 transaction is sent with empty calldata, the recipients address and the deposited value. +When withdrawing, an L2->L1 message is sent. This is then processed by the smart contract holding the ether on L1, which releases the funds. + +## L2→L1 communication + +The L2→L1 communication is more fundamental than the L1→L2 communication, as the second relies on the first. L2→L1 +communication happens by the L1 smart contract verifying messages alongside the proofs. The only “provable” part of the +communication from L2 to L1 are native L2→L1 logs emitted by VM. These can be emitted by the `to_l1` +[opcode](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/System%20contracts%20bootloader%20description.md). +Each log consists of the following fields: + +```solidity +struct L2Log { + uint8 l2ShardId; + bool isService; + uint16 txNumberInBatch; + address sender; + bytes32 key; + bytes32 value; +} + +``` + +Where: + +- `l2ShardId` is the id of the shard the opcode was called (it is currently always 0). +- `isService` a boolean flag that is not used right now +- `txNumberInBatch` the number of the transaction in the batch where the log has happened. This number is taken from the + internal counter which is incremented each time the `increment_tx_counter` is + [called](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/System%20contracts%20bootloader%20description.md). +- `sender` is the value of `this` in the frame where the L2→L1 log was emitted. +- `key` and `value` are just two 32-byte values that could be used to carry some data with the log. + +The hashed array of these opcodes is then included into the +[batch commitment](https://github.com/matter-labs/era-contracts/blob/f06a58360a2b8e7129f64413998767ac169d1efd/ethereum/contracts/zksync/facets/Executor.sol#L493). 
+Because of that we know that if the proof verifies, then the L2→L1 logs provided by the operator were correct, so we can +use that fact to produce more complex structures. Before Boojum such logs were also Merklized within the circuits and so +the Merkle tree’s root hash was included into the batch commitment also. + +### Important system values + +Two `key` and `value` fields are enough for a lot of system-related use-cases, such as sending timestamp of the batch, +previous batch hash, etc. They were and are used +[used](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/SystemContext.sol#L438) +to verify the correctness of the batch's timestamps and hashes. You can read more about block processing +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20&%20L2%20blocks%20on%20zkSync.md). + +### Long L2→L1 messages & bytecodes + +However, sometimes users want to send long messages beyond 64 bytes which `key` and `value` allow us. But as already +said, these L2→L1 logs are the only ways that the L2 can communicate with the outside world. How do we provide long +messages? + +Let’s add an `sendToL1` method in L1Messenger, where the main idea is the following: + +- Let’s submit an L2→L1 log with `key = msg.sender` (the actual sender of the long message) and + `value = keccak256(message)`. +- Now, during batch commitment the operator will have to provide an array of such long L2→L1 messages and it will be + checked on L1 that indeed for each such log the correct preimage was provided. + +A very similar idea is used to publish uncompressed bytecodes on L1 (the compressed bytecodes were sent via the long +L1→L2 messages mechanism as explained above). + +Note, however, that whenever someone wants to prove that a certain message was present, they need to compose the L2→L1 +log and prove its presence. 
+ +## Priority operations + +Also, for each priority operation, we would send its hash and it status via an L2→L1 log. On L1 we would then +reconstruct the rolling hash of the processed priority transactions, allowing to correctly verify during the +`executeBatches` method that indeed the batch contained the correct priority operations. + +Importantly, the fact that both hash and status were sent, it made it possible to +[prove](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/bridge/L1ERC20Bridge.sol#L255) +that the L2 part of a deposit has failed and ask the bridge to release funds. + +## L1→L2 Messaging + +The transactions on zkSync can be initiated not only on L2, but also on L1. There are two types of transactions that can +be initiated on L1: + +- Priority operations. These are the kind of operations that any user can create. +- Upgrade transactions. These can be created only during upgrades. + +### Prerequisites + +Please read the full +[article](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/System%20contracts%20bootloader%20description.md) +on the general system contracts / bootloader structure as well as the pubdata structure with Boojum system to understand +[the difference](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md) +between system and user logs. + +## Priority operations + +### Initiation + +A new priority operation can be appended by calling the +[requestL2Transaction](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/facets/Mailbox.sol#L236) +method on L1. This method will perform several checks for the transaction, making sure that it is processable and +provides enough fee to compensate the operator for this transaction. 
Then, this transaction will be +[appended](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/facets/Mailbox.sol#L369C1-L369C1) +to the priority queue. + +### Bootloader + +Whenever an operator sees a priority operation, it can include the transaction into the batch. While for normal L2 +transaction the account abstraction protocol will ensure that the `msg.sender` has indeed agreed to start a transaction +out of this name, for L1→L2 transactions there is no signature verification. In order to verify that the operator +includes only transactions that were indeed requested on L1, the bootloader +[maintains](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L970) +two variables: + +- `numberOfPriorityTransactions` (maintained at `PRIORITY_TXS_L1_DATA_BEGIN_BYTE` of bootloader memory) +- `priorityOperationsRollingHash` (maintained at `PRIORITY_TXS_L1_DATA_BEGIN_BYTE + 32` of the bootloader memory) + +Whenever a priority transaction is processed, the `numberOfPriorityTransactions` gets incremented by 1, while +`priorityOperationsRollingHash` is assigned to `keccak256(priorityOperationsRollingHash, processedPriorityOpHash)`, +where `processedPriorityOpHash` is the hash of the priority operations that has been just processed. + +Also, for each priority transaction, we +[emit](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L966) +a user L2→L1 log with its hash and result, which basically means that it will get Merklized and users will be able to +prove on L1 that a certain priority transaction has succeeded or failed (which can be helpful to reclaim your funds from +bridges if the L2 part of the deposit has failed). 
+ +Then, at the end of the batch, we +[submit](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3819) +and 2 L2→L1 log system log with these values. + +### Batch commit + +During block commit, the contract will remember those values, but not validate them in any way. + +### Batch execution + +During batch execution, we would pop `numberOfPriorityTransactions` from the top of priority queue and +[verify](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/facets/Executor.sol#L282) +that their rolling hash does indeed equal to `priorityOperationsRollingHash`. + +## Upgrade transactions + +### Initiation + +Upgrade transactions can only be created during a system upgrade. It is done if the `DiamondProxy` delegatecalls to the +implementation that manually puts this transaction into the storage of the DiamondProxy. Note, that since it happens +during the upgrade, there is no “real” checks on the structure of this transaction. We do have +[some validation](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/upgrades/BaseZkSyncUpgrade.sol#L175), +but it is purely on the side of the implementation which the `DiamondProxy` delegatecalls to and so may be lifted if the +implementation is changed. + +The hash of the currently required upgrade transaction is +[stored](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/Storage.sol#L138) +under `l2SystemContractsUpgradeTxHash`. + +We will also track the batch where the upgrade has been committed in the `l2SystemContractsUpgradeBatchNumber` +[variable](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/Storage.sol#L141). 
We cannot support multiple upgrades in parallel, i.e. the next upgrade should start only after the previous one has
been completed.
+ +### Execute + +Once batch with the upgrade transaction has been executed, we +[delete](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/facets/Executor.sol#L304) +them from storage for efficiency to signify that the upgrade has been fully processed and that a new upgrade can be +initiated. + +## Security considerations + +Since the operator can put any data into the bootloader memory and for L1→L2 transactions the bootloader has to blindly +trust it and rely on L1 contracts to validate it, it may be a very powerful tool for a malicious operator. Note, that +while the governance mechanism is generally trusted, we try to limit our trust for the operator as much as possible, +since in the future anyone would be able to become an operator. + +Some time ago, we _used to_ have a system where the upgrades could be done via L1→L2 transactions, i.e. the +implementation of the `DiamondProxy` upgrade would +[include](https://github.com/matter-labs/era-contracts/blob/f06a58360a2b8e7129f64413998767ac169d1efd/ethereum/contracts/zksync/upgrade-initializers/DIamondUpgradeInit2.sol#L27) +a priority transaction (with `from` equal to for instance `FORCE_DEPLOYER`) with all the upgrade params. + +In the Boojum though having such logic would be dangerous and would allow for the following attack: + +- Let’s say that we have at least 1 priority operations in the priority queue. This can be any operation, initiated by + anyone. +- The operator puts a malicious priority operation with an upgrade into the bootloader memory. This operation was never + included in the priority operations queue / and it is not an upgrade transaction. However, as already mentioned above + the bootloader has no idea what priority / upgrade transactions are correct and so this transaction will be processed. 
+ +The most important caveat of this malicious upgrade is that it may change implementation of the `Keccak256` precompile +to return any values that the operator needs. + +- When the`priorityOperationsRollingHash` will be updated, instead of the “correct” rolling hash of the priority + transactions, the one which would appear with the correct topmost priority operation is returned. The operator can’t + amend the behaviour of `numberOfPriorityTransactions`, but it won’t help much, since the + the`priorityOperationsRollingHash` will match on L1 on the execution step. + +That’s why the concept of the upgrade transaction is needed: this is the only transaction that can initiate transactions +out of the kernel space and thus change bytecodes of system contracts. That’s why it must be the first one and that’s +why +[emit](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L587) +its hash via a system L2→L1 log before actually processing it. + +### Why it doesn’t break on the previous version of the system + +This section is not required for Boojum understanding but for those willing to analyze the production system that is +deployed at the time of this writing. 
+ +Note that the hash of the transaction is calculated before the transaction is executed: +[https://github.com/matter-labs/era-contracts/blob/main/system-contracts/bootloader/bootloader.yul#L1055](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/bootloader/bootloader.yul#L1055) + +And then we publish its hash on L1 via a _system_ L2→L1 log: +[https://github.com/matter-labs/era-contracts/blob/main/system-contracts/bootloader/bootloader.yul#L1133](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/bootloader/bootloader.yul#L1133) + +In the new upgrade system, the `priorityOperationsRollingHash` is calculated on L2 and so if something in the middle +changes the implementation of `Keccak256`, it may lead to the full `priorityOperationsRollingHash` be maliciously +crafted. In the pre-Boojum system, we publish all the hashes of the priority transactions via system L2→L1 and then the +rolling hash is calculated on L1. This means that if at least one of the hash is incorrect, then the entire rolling hash +will not match also. diff --git a/content/10.zk-stack/05.concepts/_dir.yml b/content/10.zk-stack/05.concepts/_dir.yml new file mode 100644 index 00000000..80c9daae --- /dev/null +++ b/content/10.zk-stack/05.concepts/_dir.yml @@ -0,0 +1 @@ +title: Concepts diff --git a/content/10.zk-stack/10.components/10.index.md b/content/10.zk-stack/10.components/10.index.md new file mode 100644 index 00000000..4dc91672 --- /dev/null +++ b/content/10.zk-stack/10.components/10.index.md @@ -0,0 +1,31 @@ +--- +title: Overview +description: Explore the ZK Stack, a flexible, open-source framework designed for creating sovereign ZK-powered Ethereum rollups, known as ZK Chains, utilizing the foundational technology of zkSync Era. +--- + +The ZK Stack is a comprehensive framework aimed at revolutionizing the development of Ethereum rollups through its modular design and open-source nature. 
+Based on the pioneering work of zkSync Era, +the ZK Stack extends the core functionalities to enable developers +to build custom ZK Chains—Layer 2 (L2) and Layer 3 (L3) solutions—that are tailored to specific needs +while ensuring compatibility and interoperability within the Ethereum ecosystem. + +## Core features of the ZK Stack + +### Sovereignty + +The ZK Stack is built on the principle of sovereignty, granting developers complete control over their ZK Chains. +This means that developers can: + +- **Customize Chain Features**: Tailor aspects such as transaction rules, +data availability, and consensus mechanisms to suit specific use cases or performance requirements. +- **Own the Code**: Have full rights to the underlying code, providing the freedom to modify or enhance the chain as needed without external constraints. + +### Seamless connectivity + +Despite each ZK Chain's independence, the ZK Stack ensures that they do not operate in isolation. +Instead, ZK Chains are part of a cohesive network, linked by hyperbridges that facilitate seamless interactions: + +- **Trustless Interoperability**: Hyperbridges allow for secure and reliable communication between different ZK Chains +without needing to trust a central authority, making interactions as trustless as those on Ethereum itself. +- **Fast and Cost-Effective**: Communication and asset transfers between ZK Chains are designed to be both rapid (completed within minutes) +and economical (incurring costs equivalent to a single standard transaction). diff --git a/content/10.zk-stack/10.components/100.block-explorer.md b/content/10.zk-stack/10.components/100.block-explorer.md new file mode 100644 index 00000000..51f0505a --- /dev/null +++ b/content/10.zk-stack/10.components/100.block-explorer.md @@ -0,0 +1,20 @@ +--- +title: Block Explorer +description: Explore the functionality of Block Explorer, a comprehensive tool for monitoring activities on your ZK Chain. 
+--- + +[The Block Explorer](https://github.com/matter-labs/block-explorer) +is a tool designed to provide comprehensive insights into all activities occurring on a ZK Chain. +This tool is especially useful for users and developers who need to monitor or interact with the blockchain. Block Explorer consists of three main components: + +- **Block Explorer Worker:** + This indexer service manages ZK Chain data. + Its main role is to collect data from the blockchain in real time, process this data, and populate a database that supports the API. + +- **Block Explorer API:** + This component offers an HTTP API to access structured data from the ZK Chain. + It retrieves data from the database maintained by the Block Explorer Worker. + +- **Block Explorer App:** + This is the user interface that enables users and developers to navigate and examine transactions, + blocks, batches, contracts, tokens, and other elements within the ZK Chain. diff --git a/content/10.zk-stack/10.components/20.smart-contracts/10.index.md b/content/10.zk-stack/10.components/20.smart-contracts/10.index.md new file mode 100644 index 00000000..80e9159b --- /dev/null +++ b/content/10.zk-stack/10.components/20.smart-contracts/10.index.md @@ -0,0 +1,288 @@ +--- +title: Smart Contracts +description: Explore the architecture of Layer 2 smart contracts on Ethereum and their role in the ZK Chain ecosystem. +--- + +Smart contracts on Ethereum are pivotal in enabling rollups to inherit security and decentralization. +These contracts manage the state changes of rollups by storing information on Ethereum and providing validity proofs for state transitions. +They also facilitate communication mechanisms among different layers and systems. + +In addition to the primary roles, some smart contracts also support the ZK Chain ecosystem. +Detailed information on these contracts can be found in the [Shared Bridges](/zk-stack/components/shared-bridges) section. 
+The Shared Bridge utilizes these smart contracts to facilitate cross-chain interactions and enhance connectivity within the blockchain environment. + +--- +## Diamond + +Technically, this L1 smart contract acts as a connector between Ethereum (L1) and a single L2. It checks the validity +proof and data availability, handles L2 <-> L1 communication, finalizes L2 state transition, and more. + +### DiamondProxy + +The main contract uses [EIP-2535](https://eips.ethereum.org/EIPS/eip-2535) diamond proxy pattern. It is an in-house +implementation that is inspired by the [mudgen reference implementation](https://github.com/mudgen/Diamond). It has no +external functions, only the fallback that delegates a call to one of the facets (target/implementation contract). So +even an upgrade system is a separate facet that can be replaced. + +Each of the facets has an associated +parameter that indicates if it is possible to freeze access to the facet. Privileged actors can freeze the **diamond** +(not a specific facet!) and all facets with the marker `isFreezable` should be inaccessible until the governor or admin +unfreezes the diamond. Note that it is a very dangerous thing since the diamond proxy can freeze the upgrade system and +then the diamond will be frozen forever. + +The diamond proxy pattern is very flexible and extendable. For now, it allows splitting implementation contracts by +their logical meaning, removes the limit of bytecode size per contract and implements security features such as +freezing. In the future, it can also be viewed as [EIP-6900](https://eips.ethereum.org/EIPS/eip-6900) for +[zkStack](https://blog.matter-labs.io/introducing-the-zk-stack-c24240c2532a), where each ZK Chain can implement a +sub-set of allowed implementation contracts. + +### GettersFacet + +Separate facet, whose only function is providing `view` and `pure` methods. 
It also implements +[diamond loupe](https://eips.ethereum.org/EIPS/eip-2535#diamond-loupe) which makes managing facets easier. This contract +must never be frozen. + +### AdminFacet + +Controls changing the privileged addresses such as governor and validators or one of the system parameters (L2 +bootloader bytecode hash, verifier address, verifier parameters, etc), and it also manages the freezing/unfreezing and +execution of upgrades in the diamond proxy. + +The admin facet is controlled by two entities: + +- Governance - Separate smart contract that can perform critical changes to the system as protocol upgrades. This + contract controlled by two multisigs, one managed by Matter Labs team and another will be multisig with well-respected + contributors in the crypto space. Only together they can perform an instant upgrade, the Matter Labs team can only + schedule an upgrade with delay. +- Admin - Multisig smart contract managed by Matter Labs that can perform non-critical changes to the system such as + granting validator permissions. Note, that the Admin is the same multisig as the owner of the governance. + +### MailboxFacet + +The facet that handles L2 <-> L1 communication, an overview for which can be found in +[docs](/build/developer-reference/l1-l2-interoperability). + +The Mailbox performs three functions: + +- L1 <-> L2 communication. +- Bridging native Ether to the L2 (with the launch of the Shared Bridge this will be moved) +- Censorship resistance mechanism (in the research stage). + +L1 -> L2 communication is implemented as requesting an L2 transaction on L1 and executing it on L2. This means a user +can call the function on the L1 contract to save the data about the transaction in some queue. Later on, a validator can +process it on L2 and mark it as processed on the L1 priority queue. Currently, it is used for sending information from +L1 to L2 or implementing multi-layer protocols. 
+ +_NOTE_: While user requests the transaction from L1, the initiated transaction on L2 will have such a `msg.sender`: + +```solidity + address sender = msg.sender; + if (sender != tx.origin) { + sender = AddressAliasHelper.applyL1ToL2Alias(msg.sender); + } +``` + +where + +```solidity +uint160 constant offset = uint160(0x1111000000000000000000000000000000001111); + +function applyL1ToL2Alias(address l1Address) internal pure returns (address l2Address) { + unchecked { + l2Address = address(uint160(l1Address) + offset); + } +} + +``` + +For most of the rollups the address aliasing needs to prevent cross-chain exploits that would otherwise be possible if +we simply reused the same L1 addresses as the L2 sender. In zkEVM address derivation rule is different from the +Ethereum, so cross-chain exploits are already impossible. However, the zkEVM may add full EVM support in the future, so +applying address aliasing leaves room for future EVM compatibility. + +The L1 -> L2 communication is also used for bridging ether. The user should include a `msg.value` when initiating a +transaction request on the L1 contract. Before executing a transaction on L2, the specified address will be credited +with the funds. To withdraw funds user should call `withdraw` function on the `L2EtherToken` system contracts. This will +burn the funds on L2, allowing the user to reclaim them through the `finalizeEthWithdrawal` function on the +`MailboxFacet`. + +More about L1->L2 operations can be found +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1→L2%20ops%20on%20zkSync.md). + +L2 -> L1 communication, in contrast to L1 -> L2 communication, is based only on transferring the information, and not on +the transaction execution on L1. 
The full description of the mechanism for sending information from L2 to L1 can be +found +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md). + +### ExecutorFacet + +A contract that accepts L2 batches, enforces data availability and checks the validity of zk-proofs. + +The state transition is divided into three stages: + +- `commitBatches` - check L2 batch timestamp, process the L2 logs, save data for a batch, and prepare data for zk-proof. +- `proveBatches` - validate zk-proof. +- `executeBatches` - finalize the state, marking L1 -> L2 communication processing, and saving Merkle tree with L2 logs. + +Each L2 -> L1 system log will have a key that is part of the following: + +```solidity +enum SystemLogKey { + L2_TO_L1_LOGS_TREE_ROOT_KEY, + TOTAL_L2_TO_L1_PUBDATA_KEY, + STATE_DIFF_HASH_KEY, + PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY, + PREV_BATCH_HASH_KEY, + CHAINED_PRIORITY_TXN_HASH_KEY, + NUMBER_OF_LAYER_1_TXS_KEY, + EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY +} + +``` + +When a batch is committed, we process L2 -> L1 system logs. Here are the invariants that are expected there: + +- In a given batch there will be either 7 or 8 system logs. The 8th log is only required for a protocol upgrade. +- There will be a single log for each key that is contained within `SystemLogKey` +- Three logs from the `L2_TO_L1_MESSENGER` with keys: +- `L2_TO_L1_LOGS_TREE_ROOT_KEY` +- `TOTAL_L2_TO_L1_PUBDATA_KEY` +- `STATE_DIFF_HASH_KEY` +- Two logs from `L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR` with keys: + - `PACKED_BATCH_AND_L2_BLOCK_TIMESTAMP_KEY` + - `PREV_BATCH_HASH_KEY` +- Two or three logs from `L2_BOOTLOADER_ADDRESS` with keys: + - `CHAINED_PRIORITY_TXN_HASH_KEY` + - `NUMBER_OF_LAYER_1_TXS_KEY` + - `EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY` +- None logs from other addresses (may be changed in the future). 
+ +### DiamondInit + +It is a one-function contract that implements the logic of initializing a diamond proxy. It is called only once on the +diamond constructor and is not saved in the diamond as a facet. + +Implementation detail - function returns a magic value just like it is designed in +[EIP-1271](https://eips.ethereum.org/EIPS/eip-1271), but the magic value is 32 bytes in size. + +## Bridges + +Bridges are completely separate contracts from the Diamond. They are a wrapper for L1 <-> L2 communication on contracts +on both L1 and L2. Upon locking assets on L1, a request is sent to mint these bridged assets on L2. Upon burning assets +on L2, a request is sent to unlock them on L2. + +Unlike the native Ether bridging, all other assets can be bridged by the custom implementation relying on the trustless +L1 <-> L2 communication. + +### L1ERC20Bridge + +The "standard" implementation of the ERC20 token bridge. Works only with regular ERC20 tokens, i.e. not with +fee-on-transfer tokens or other custom logic for handling user balances. + +- `deposit` - lock funds inside the contract and send a request to mint bridged assets on L2. +- `claimFailedDeposit` - unlock funds if the deposit was initiated but then failed on L2. +- `finalizeWithdrawal` - unlock funds for the valid withdrawal request from L2. + +The owner of the L1ERC20Bridge is the Governance contract. + +### L2ERC20Bridge + +The L2 counterpart of the L1 ERC20 bridge. + +- `withdraw` - initiate a withdrawal by burning funds on the contract and sending a corresponding message to L1. +- `finalizeDeposit` - finalize the deposit and mint funds on L2. The function is only callable by L1 bridge. + +The owner of the L2ERC20Bridge and the contracts related to it is the Governance contract. + +### L1WethBridge + +The custom bridge exclusively handles transfers of WETH tokens between the two domains. 
It is designed to streamline and +enhance the user experience for bridging WETH tokens by minimizing the number of transactions required and reducing +liquidity fragmentation thus improving efficiency and user experience. + +This contract accepts WETH deposits on L1, unwraps them to ETH, and sends the ETH to the L2 WETH bridge contract, where +it is wrapped back into WETH and delivered to the L2 recipient. + +Thus, the deposit is made in one transaction, and the user receives L2 WETH that can be unwrapped to ETH. + +For withdrawals, the contract receives ETH from the L2 WETH bridge contract, wraps it into WETH, and sends the WETH to +the L1 recipient. + +The owner of the L1WethBridge contract is the Governance contract. + +### L2WethBridge + +The L2 counterpart of the L1 WETH bridge. + +The owner of the L2WethBridge and L2Weth contracts is the Governance contract. + +## Governance + +This contract manages calls for all governed zkEVM contracts on L1 and L2. Mostly, it is used for upgradability an +changing critical system parameters. The contract has minimum delay settings for the call execution. + +Each upgrade consists of two steps: + +- Scheduling - The owner can schedule upgrades in two different manners: + - Fully transparent data. All the targets, calldata, and upgrade conditions are known to the community before upgrade + execution. + - Shadow upgrade. The owner only shows the commitment to the upgrade. This upgrade type is mostly useful for fixing + critical issues in the production environment. +- Upgrade execution - the Owner or Security council can perform the upgrade with previously scheduled parameters. + - Upgrade with delay. Scheduled operations should elapse the delay period. Both the owner and Security Council can + execute this type of upgrade. + - Instant upgrade. Scheduled operations can be executed at any moment. Only the Security Council can perform this type + of upgrade. 
Please note that both the Owner and the Security Council can cancel the upgrade before its execution.
Currently, it is supposed to set all permissions to public. + +The owner of the Allowlist contract is the Governance contract. + +## Deposit Limitation + +The amount of deposit can be limited. This limitation is applied on an account level and is not time-based. In other +words, each account cannot deposit more than the cap defined. The tokens and the cap can be set through governance +transactions. Moreover, there is an allow listing mechanism as well (only some allow listed accounts can call some +specific functions). So, the combination of deposit limitation and allow listing leads to limiting the deposit of the +allow listed account to be less than the defined cap. + +```solidity +struct Deposit { + bool depositLimitation; + uint256 depositCap; +} + +``` + +Currently, the limit is used only for blocking deposits of the specific token (turning on the limitation and setting the +limit to zero). And on the near future, this functionality will be completely removed. diff --git a/content/10.zk-stack/10.components/20.smart-contracts/20.system-contracts.md b/content/10.zk-stack/10.components/20.smart-contracts/20.system-contracts.md new file mode 100644 index 00000000..5c3d8fff --- /dev/null +++ b/content/10.zk-stack/10.components/20.smart-contracts/20.system-contracts.md @@ -0,0 +1,324 @@ +--- +title: System Contracts +description: An overview of system contracts in VM, detailing their unique privileges, locations, and update mechanisms. +--- + +## Introduction to system contracts + +System contracts are a specialized set of contracts in virtual machines (VM) that enhance the Ethereum Virtual Machine (EVM) +by supporting opcodes not available by default. +These contracts operate under unique conditions and have privileges not accessible to standard user contracts. + +## Location and privileges + +System contracts reside in a designated area known as _kernel space_, specifically within the address space ranging from `[0..2^16-1]`. 
+This limited address space sets them apart from regular user contracts and grants them special operating privileges. + +## Characteristics and deployment + +- **Pre-deployed:** System contracts are deployed at the genesis of the blockchain, ensuring they are in place from the very beginning. +- **Special Privileges:** Unlike user contracts, system contracts can perform operations that require higher security clearance or access to core VM functionalities. +- **Update Mechanism:** Modifying the code of system contracts is not a routine procedure. + Updates can only be executed through a system upgrade managed from Layer 1 (L1), ensuring controlled and secure changes. + +## Functionality and implementation + +While system contracts support a variety of operations, from simple arithmetic to complex memory and storage management, +their detailed functionalities and the exact execution requirements are documented extensively in the doc-comments within their codebases. +This documentation provides developers with insights into how these contracts operate and integrate with the broader system. + +## Role in the protocol + +System contracts form an essential part of the protocol architecture by enabling functionality that extends beyond the basic capabilities of the EVM. +The codes of all system contracts, including `DefaultAccount`, +are integral to the protocol and can only be altered through a system upgrade initiated from L1. + +--- +## SystemContext + +This contract is used to support various system parameters not included in the VM by default, i.e. `chainId`, `origin`, +`ergsPrice`, `blockErgsLimit`, `coinbase`, `difficulty`, `baseFee`, `blockhash`, `block.number`, `block.timestamp.` + +It is important to note that the constructor is **not** run for system contracts upon genesis, i.e. the constant context +values are set on genesis explicitly. 
Notably, if in the future we want to upgrade the contracts, we will do it via +`ContractDeployer` and so the constructor will be run. + +This contract is also responsible for ensuring validity and consistency of batches, L2 blocks and virtual blocks. The +implementation itself is rather straightforward, but to better understand this contract, please take a look at the +[page](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20&%20L2%20blocks%20on%20zkSync.md) +about the block processing on zkSync. + +## AccountCodeStorage + +The code hashes of accounts are stored inside the storage of this contract. Whenever a VM calls a contract with address +`address` it retrieves the value under storage slot `address` of this system contract, if this value is non-zero, it +uses this as the code hash of the account. + +Whenever a contract is called, the VM asks the operator to provide the preimage for the codehash of the account. That is +why data availability of the code hashes is paramount. + +### Constructing vs Non-Constructing Code Hash + +In order to prevent contracts from being able to call a contract during its construction, we set the marker (i.e. second +byte of the bytecode hash of the account) as `1`. This way, the VM will ensure that whenever a contract is called +without the `isConstructor` flag, the bytecode of the default account (i.e. EOA) will be substituted instead of the +original bytecode. + +## BootloaderUtilities + +This contract contains some of the methods which are needed purely for the bootloader functionality but were moved out +from the bootloader itself for the convenience of not writing this logic in Yul. + +## DefaultAccount + +Whenever a contract that does **not** both: + +- belong to kernel space +- have any code deployed on it (the value stored under the corresponding storage slot in `AccountCodeStorage` is zero) + +The code of the default account is used. 
The main purpose of this contract is to provide an EOA-like experience for both
+wallet users and contracts that call it, i.e. it should not be distinguishable (apart from spent gas) from EOA accounts on
+Ethereum.
+ +Both contracts should apply padding to the input according to their respective specifications, and then make a +precompile call with the padded data. All other hashing work will be done in the zk-circuit. It's important to note that +the crypto part of the precompiles expects to work with padded data. This means that a bug in applying padding may lead +to an unprovable transaction. + +## L2EthToken & MsgValueSimulator + +Unlike Ethereum, zkEVM does not have any notion of any special native token. That’s why we have to simulate operations +with Ether via two contracts: `L2EthToken` & `MsgValueSimulator`. + +`L2EthToken` is a contract that holds the balances of ETH for the users. This contract does NOT provide ERC20 interface. +The only method for transferring Ether is `transferFromTo`. It permits only some system contracts to transfer on behalf +of users. This is needed to ensure that the interface is as close to Ethereum as possible, i.e. the only way to transfer +ETH is by doing a call to a contract with some `msg.value`. This is what `MsgValueSimulator` system contract is for. + +Whenever anyone wants to do a non-zero value call, they need to call `MsgValueSimulator` with: + +- The calldata for the call equal to the original one. +- Pass `value` and whether the call should be marked with `isSystem` in the first extra abi params. +- Pass the address of the callee in the second extraAbiParam. + +## KnownCodeStorage + +This contract is used to store whether a certain code hash is “known”, i.e. can be used to deploy contracts. On zkSync, +the L2 stores the contract’s code _hashes_ and not the codes themselves. Therefore, it must be part of the protocol to +ensure that no contract with unknown bytecode (i.e. hash with an unknown preimage) is ever deployed. + +The factory dependencies field provided by the user for each transaction contains the list of the contract’s bytecode +hashes to be marked as known. 
We can not simply trust the operator to “know” these bytecodehashes as the operator might +be malicious and hide the preimage. We ensure the availability of the bytecode in the following way: + +- If the transaction comes from L1, i.e. all its factory dependencies have already been published on L1, we can simply + mark these dependencies as “known”. +- If the transaction comes from L2, i.e. (the factory dependencies are yet to publish on L1), we make the user pays by + burning ergs proportional to the bytecode’s length. After that, we send the L2→L1 log with the bytecode hash of the + contract. It is the responsibility of the L1 contracts to verify that the corresponding bytecode hash has been + published on L1. + +It is the responsibility of the `ContractDeployer` system contract to deploy only +those code hashes that are known. + +The KnownCodesStorage contract is also responsible for ensuring that all the “known” bytecode hashes are also valid. + +## ContractDeployer & ImmutableSimulator + +`ContractDeployer` is a system contract responsible for deploying contracts on zkSync. It is better to understand how it +works in the context of how the contract deployment works on zkSync. Unlike Ethereum, where `create`/`create2` are +opcodes, on zkSync these are implemented by the compiler via calls to the ContractDeployer system contract. + +For additional security, we also distinguish the deployment of normal contracts and accounts. That’s why the main +methods that will be used by the user are `create`, `create2`, `createAccount`, `create2Account`, which simulate the +CREATE-like and CREATE2-like behavior for deploying normal and account contracts respectively. + +### **Address derivation** + +Each rollup that supports L1→L2 communications needs to make sure that the addresses of contracts on L1 and L2 do not +overlap during such communication (otherwise it would be possible that some evil proxy on L1 could mutate the state of +the L2 contract). 
Generally, rollups solve this issue in two ways: + +- XOR/ADD some kind of constant to addresses during L1→L2 communication. That’s how rollups closer to full + EVM-equivalence solve it, since it allows them to maintain the same derivation rules on L1 at the expense of contract + accounts on L1 having to redeploy on L2. +- Have different derivation rules from Ethereum. That is the path that zkSync has chosen, mainly because since we have + different bytecode than on EVM, CREATE2 address derivation would be different in practice anyway. + +You can see the rules for our address derivation in `getNewAddressCreate2`/ `getNewAddressCreate` methods in the +ContractDeployer. + +Note, that we still add a certain constant to the addresses during L1→L2 communication in order to allow ourselves some +way to support EVM bytecodes in the future. + +### **Deployment nonce** + +On Ethereum, the same nonce is used for CREATE for accounts and EOA wallets. On zkSync this is not the case, we use a +separate nonce called “deploymentNonce” to track the nonces for accounts. This was done mostly for consistency with +custom accounts and for having multicalls feature in the future. + +### **General process of deployment** + +- After incrementing the deployment nonce, the contract deployer must ensure that the bytecode that is being deployed is + available. +- After that, it puts the bytecode hash with a + [special constructing marker](#constructing-vs-non-constructing-code-hash) as code for the address of the + to-be-deployed contract. +- Then, if there is any value passed with the call, the contract deployer passes it to the deployed account and sets the + `msg.value` for the next as equal to this value. +- Then, it uses `mimic_call` for calling the constructor of the contract out of the name of the account. +- It parses the array of immutables returned by the constructor (we’ll talk about immutables in more details later). 
+- Calls `ImmutableSimulator` to set the immutables that are to be used for the deployed contract. + +Note how it is different from the EVM approach: on EVM when the contract is deployed, it executes the initCode and +returns the deployedCode. On zkSync, contracts only have the deployed code and can set immutables as storage variables +returned by the constructor. + +### **Constructor** + +On Ethereum, the constructor is only part of the initCode that gets executed during the deployment of the contract and +returns the deployment code of the contract. On zkSync, there is no separation between deployed code and constructor +code. The constructor is always a part of the deployment code of the contract. In order to protect it from being called, +the compiler-generated contracts invoke constructor only if the `isConstructor` flag provided (it is only available for +the system contracts). + +After execution, the constructor must return an array of: + +```solidity +struct ImmutableData { + uint256 index; + bytes32 value; +} + +``` + +basically denoting an array of immutables passed to the contract. + +### **Immutables** + +Immutables are stored in the `ImmutableSimulator` system contract. The way how `index` of each immutable is defined is +part of the compiler specification. This contract treats it simply as mapping from index to value for each particular +address. + +Whenever a contract needs to access a value of some immutable, they call the +`ImmutableSimulator.getImmutable(getCodeAddress(), index)`. Note that on zkSync it is possible to get the current +execution address. + +### **Return value of the deployment methods** + +If the call succeeded, the address of the deployed contract is returned. If the deploy fails, the error bubbles up. + +## DefaultAccount + +The implementation of the default account abstraction. This is the code that is used by default for all addresses that +are not in kernel space and have no contract deployed on them. 
This address: + +- Contains minimal implementation of our account abstraction protocol. Note that it supports the + [built-in paymaster flows](https://v2-docs.zksync.io/dev/developer-guides/aa.html#paymasters). +- When anyone (except bootloader) calls it, it behaves in the same way as a call to an EOA, i.e. it always returns + `success = 1, returndatasize = 0` for calls from anyone except for the bootloader. + +## L1Messenger + +A contract used for sending arbitrary length L2→L1 messages from zkSync to L1. While zkSync natively supports a rather +limited number of L1→L2 logs, which can transfer only roughly 64 bytes of data a time, we allowed sending +nearly-arbitrary length L2→L1 messages with the following trick: + +The L1 messenger receives a message, hashes it and sends only its hash as well as the original sender via L2→L1 log. +Then, it is the duty of the L1 smart contracts to make sure that the operator has provided full preimage of this hash in +the commitment of the batch. + +The `L1Messenger` is also responsible for validating the total pubdata to be sent on L1. You can read more about it +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md). + +## NonceHolder + +Serves as storage for nonces for our accounts. Besides making it easier for operator to order transactions (i.e. by +reading the current nonces of account), it also serves a separate purpose: making sure that the pair (address, nonce) is +always unique. + +It provides a function `validateNonceUsage` which the bootloader uses to check whether the nonce has been used for a +certain account or not. Bootloader enforces that the nonce is marked as non-used before validation step of the +transaction and marked as used one afterwards. The contract ensures that once marked as used, the nonce can not be set +back to the “unused” state. 
+ +Note that nonces do not necessarily have to be monotonic (this is needed to support more interesting applications of +account abstractions, e.g. protocols that can start transactions on their own, tornado-cash like protocols, etc). That’s +why there are two ways to set a certain nonce as “used”: + +- By incrementing the `minNonce` for the account (thus making all nonces that are lower than `minNonce` as used). +- By setting some non-zero value under the nonce via `setValueUnderNonce`. This way, this key will be marked as used and + will no longer be allowed to be used as nonce for accounts. This way it is also rather efficient, since these 32 bytes + could be used to store some valuable information. + +The accounts upon creation can also provide which type of nonce ordering do they want: Sequential (i.e. it should be +expected that the nonces grow one by one, just like EOA) or Arbitrary, the nonces may have any values. This ordering is +not enforced in any way by system contracts, but it is more of a suggestion to the operator on how it should order the +transactions in the mempool. + +## EventWriter + +A system contract responsible for emitting events. + +It accepts in its 0-th extra abi data param the number of topics. In the rest of the extraAbiParams he accepts topics +for the event to emit. Note, that in reality the event the first topic of the event contains the address of the account. +Generally, the users should not interact with this contract directly, but only through Solidity syntax of `emit`-ing new +events. + +## Compressor + +One of the most expensive resource for a rollup is data availability, so in order to reduce costs for the users we +compress the published pubdata in several ways: + +- We compress published bytecodes. +- We compress state diffs. + +This contract contains utility methods that are used to verify the correctness of either bytecode or state diff +compression. 
You can read more on how we compress state diffs and bytecodes in the corresponding +[document](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md). + +## Known issues to be resolved + +The protocol, while conceptually complete, contains some known issues which will be resolved in the short to middle +term. + +- Fee modeling is yet to be improved. More on it in the + [document](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/zkSync%20fee%20model.md) + on the fee model. +- We may add some kind of default implementation for the contracts in the kernel space (i.e. if called, they wouldn’t + revert but behave like an EOA). diff --git a/content/10.zk-stack/10.components/20.smart-contracts/_dir.yml b/content/10.zk-stack/10.components/20.smart-contracts/_dir.yml new file mode 100644 index 00000000..84c7a8da --- /dev/null +++ b/content/10.zk-stack/10.components/20.smart-contracts/_dir.yml @@ -0,0 +1 @@ +title: Smart & System Contracts diff --git a/content/10.zk-stack/10.components/30.shared-bridges.md b/content/10.zk-stack/10.components/30.shared-bridges.md new file mode 100644 index 00000000..9a57fdd0 --- /dev/null +++ b/content/10.zk-stack/10.components/30.shared-bridges.md @@ -0,0 +1,245 @@ +--- +title: Shared Bridges +description: Explore the design and functionality of Shared Bridges within Ethereum's ZK Chain ecosystem, including future features and system upgrades. +--- + +Ethereum's evolving infrastructure is shifting towards a rollup-centric future, +focusing on an interconnected ecosystem of zero-knowledge Ethereum Virtual Machines (zkEVMs), collectively known as ZK Chains. +This transformation relies on robust Layer 1 (L1) smart contracts to maintain coherence and security across the ecosystem. + +## ZK Stack approach + +Our approach to developing this ecosystem involves specific architectures and interfaces of L1 smart contracts. 
+These contracts are designed to support the changing needs of the Ethereum landscape and facilitate the integration of new features as the technology advances. + +### High-level design goals + +- **Permissionless ZK Chain Launch:** + Enable the launch of new ZK Chains within the ecosystem without requiring permissions, promoting an open and inclusive network. +- **Unified Liquidity through Hyperbridges:** + Establish bridges that allow seamless asset liquidity across the entire ecosystem, making asset management more straightforward and efficient. +- **Simplified Multi-chain Contract Development:** + Provide easy access to traditional bridges and supporting architecture, facilitating the development of multi-chain smart contracts. + +### Trust and verification + +ZK Chains must meet common trust and verification standards. +A single set of L1 smart contracts will manage the proof verification across all ZK Chains. +Any upgrade to the proof system will necessitate a simultaneous upgrade across all chains to maintain uniformity and security. + +### Bridging and liquidity + +- **Bridgehub Contract on L1:** + This contract will link asset bridges to all ZK Chains and manage the ecosystem's ETH reserves. It acts as the central node in the bridging architecture. +- **System Contracts on ZK Chains:** + Special system contracts will be added to ZK Chains to enable bridging functionalities, + allowing them to interact smoothly with the Bridgehub and manage assets efficiently. + +## Modularity and future compatibility + +The design aims to be as modular as possible, providing developers with the flexibility to alter the architecture based on evolving needs, +such as changes in consensus mechanisms, staking models, and data availability requirements. + +### Anticipating future developments + +- **Layer 3 Integrations and Proof Aggregation:** + Future updates will consider Layer 3 technologies and proof aggregation methods to enhance scalability and efficiency. 
+- **Alternative State Transition Contracts and ZK IP:** + The design will accommodate alternative State Transition (ST) contracts and ZK IP, + which will ensure unified liquidity and interoperability among all state transitions when these features become available. + +The Shared Bridge and the associated smart contracts are foundational to the sustainable growth of the ZK Chain ecosystem. +For further details on ZK Chains and their operational dynamics, refer to our [blog post on ZK Chains](https://blog.matter-labs.io/introduction-to-hyperchains-fdb33414ead7) +or the section on [ZK Chains](/zk-stack/concepts/zk-chains) + +--- +## Architecture + +![Contracts](/images/zk-stack/contracts-external.png) + +## Components: Bridgehub + +- Acts as a hub for bridges, so that they have a single point of communication with all ZK Chain contracts. This + allows L1 assets to be locked in the same contract for all ZK Chains, including L3s and validiums. The `Bridgehub` + also implements the following: +- `Registry` This is where ZK Chains can register, starting in a permissioned manner, but with the goal to be + permissionless in the future. This is where their `chainID` is determined. L3s will also register here. This + `Registry` is also where State Transition contracts should register. Each chain has to specify its desired ST when + registering (Initially, only one will be available). + + ```solidity + function newChain( + uint256 _chainId, + address _stateTransition + ) external returns (uint256 chainId); + + function newStateTransition(address _stateTransition) external; + ``` + +- `BridgehubMailbox` routes messages to the Diamond proxy’s Mailbox facet based on chainID + + - Same as the current zkEVM + [Mailbox](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/contracts/interfaces/IMailbox.sol), just + with chainId, + - Ether needs to be deposited and withdrawn from here. + - This is where L2 transactions can be requested. 
+ + ```solidity + function requestL2Transaction( + uint256 _chainId, + address _contractL2, + uint256 _l2Value, + bytes calldata _calldata, + uint256 _l2GasLimit, + uint256 _l2GasPerPubdataByteLimit, + bytes[] calldata _factoryDeps, + address _refundRecipient + ) public payable override returns (bytes32 canonicalTxHash) { + address proofChain = bridgeheadStorage.proofChain[_chainId]; + canonicalTxHash = IProofChain(proofChain).requestL2TransactionBridgehead( + _chainId, + msg.value, + msg.sender, + _contractL2, + _l2Value, + _calldata, + _l2GasLimit, + _l2GasPerPubdataByteLimit, + _factoryDeps, + _refundRecipient + ); + } + ``` + +- `Hypermailbox` + - This will allow general message passing (L2<>L2, L2<>L3, etc). This is where the `Mailbox` sends the `Hyperlogs`. + `Hyperlogs` are commitments to these messages sent from a single ZK Chain. `Hyperlogs` are aggregated into a + `HyperRoot` in the `HyperMailbox`. + - This component has not been implemented yet + +### Main Asset Shared Bridges + +- Some assets have to be natively supported (ETH, WETH) and it also makes sense to support some generally accepted token + standards (ERC20 tokens), as this makes it easy to bridge those tokens (and ensures a single version of them exists on + the ZK Chain). These canonical asset contracts are deployed from L1 by a bridge shared by all ZK Chains. This is + where assets are locked on L1. These bridges use the Bridgehub to communicate with all ZK Chains. Currently, these + bridges are the `WETH` and `ERC20` bridges. + + - The pair on L2 is deployed from L1. The hash of the factory dependencies is stored on L1, and when a ZK Chain + wants to register, it can passes it in for deployment, it is verified, and the contract is deployed on L2. The + actual token contracts on L2 are deployed by the L2 bridge. 
+ + ```solidity + function initializeChain( + uint256 _chainId, + bytes[] calldata _factoryDeps, + uint256 _deployBridgeImplementationFee, + uint256 _deployBridgeProxyFee + ) external payable { + .... + // Deploy L2 bridge proxy contract + l2Bridge[_chainId] = BridgeInitializationHelper.requestDeployTransaction( + _chainId, + bridgehead, + _deployBridgeProxyFee, + l2WethBridgeProxyBytecodeHash, + l2WethBridgeProxyConstructorData, + // No factory deps are needed for L2 bridge proxy, because it is already passed in the previous step + new bytes[](0) + ); + ``` + +## Components: State Transition + +- `StateTransition` A state transition manages proof verification and DA for multiple chains. It also implements the + following functionalities: + - `StateTransitionRegistry` The ST is shared for multiple chains, so initialization and upgrades have to be the same + for all chains. Registration is not permissionless but happens based on the registrations in the bridgehub’s + `Registry`. At registration a `DiamondProxy` is deployed and initialized with the appropriate `Facets` for each + ZK Chain. + - `Facets` and `Verifier` are shared across chains that relies on the same ST: `Base`, `Executor` , `Getters`, `Admin` + , `Mailbox.`The `Verifier` is the contract that actually verifies the proof, and is called by the `Executor`. + - Upgrade Mechanism The system requires all chains to be up-to-date with the latest implementation, so whenever an + update is needed, we have to “force” each chain to update, but due to decentralization, we have to give each chain a + time frame (more information in the + [Upgrade Mechanism](https://www.notion.so/ZK-Stack-shared-bridge-alpha-version-a37c4746f8b54fb899d67e474bfac3bb?pvs=21) + section). This is done in the update mechanism contract, this is where the bootloader and system contracts are + published, and the `ProposedUpgrade` is stored. Then each chain can call this upgrade for themselves as needed. 
+    After the deadline is over, the not-updated chains are frozen, that is, cannot post new proofs. Frozen chains can
+    unfreeze by updating their proof system.
+- Each chain has a `DiamondProxy`.
+  - The [Diamond Proxy](https://eips.ethereum.org/EIPS/eip-2535) is the proxy pattern that is used for the chain
+    contracts. A diamond proxy points to multiple implementation contracts called facets. Each selector is saved in the
+    proxy, and the correct facet is selected and called.
+  - In the future the DiamondProxy can be configured by picking alternative facets, e.g. Validiums will have their own
+    `Executor`.
+
+## Components: Chain Specific Contracts
+
+- A chain might implement its own specific consensus mechanism. This needs its own contracts. Only this contract will be
+  able to submit proofs to the State Transition contract.
+- Currently, the `ValidatorTimelock` is an example of such a contract.
+
+### Components interactions
+
+In this section, we will present some diagrams showing the interaction of different components.
+
+#### New Chain
+
+A chain registers in the Bridgehub; this is where the chain ID is determined. The chain’s governor specifies the State
+Transition that they plan to use. In the first version only a single State Transition contract will be available for
+use, ours, with Boojum proof verification.
+
+At initialization we prepare the `StateTransitionChain` contract. We store the genesis batch hash in the ST contract;
+all chains start out with the same state. A diamond proxy is deployed and initialised with this initial value, along
+with predefined facets which are made available by the ST contract. These facets contain the proof verification and
+other features required to process proofs. The chain ID is set in the VM in a special system transaction sent from L1. 
+
+<!--![newChain.png](./img/newChain.png) Image outdated-->
+
+## Components: WETH Contract
+
+Ether, the native gas token, is part of the core system contracts, so deploying it is not necessary. But WETH is just a
+smart contract; it needs to be deployed and initialised. This happens from the L1 WETH bridge. This deploys on L2 the
+corresponding bridge and ERC20 contract. This is deployed from L1, but the L2 address is known at deployment time.
+
+![deployWeth.png](/images/zk-stack/deploy-weth.png)
+
+## Components: Deposit WETH
+
+The user can deposit WETH into the ecosystem using the WETH bridge on L1. The destination chain ID has to be specified.
+The Bridgehub unwraps the WETH, keeps the ETH, and sends a message to the destination L2 to mint WETH to the
+specified address.
+
+![depositWeth.png](/images/zk-stack/deposit-weth.png)
+
+---
+
+## Common Standards and Upgrades
+
+In this initial phase, ZK Chains have to follow some common standards, so that they can trust each other. This means
+all chains start out with the same empty state, they have the same VM implementations and proof systems, asset contracts
+can trust each other on different chains, and the chains are upgraded together. We elaborate on the shared upgrade mechanism
+here.
+
+### Upgrade mechanism
+
+Currently, there are three types of upgrades for zkEVM. Normal upgrades (used for new features) are initiated by the
+Governor (a multisig) and are public for a certain timeframe before they can be applied. Shadow upgrades are similar to
+normal upgrades, but the data is not known at the moment the upgrade is proposed, but only when executed (they can be
+executed with the delay, or instantly if approved by the security council). Instant upgrades (used for security issues),
+on the other hand, happen quickly and need to be approved by the Security Council in addition to the Governor. For
+ZK Chains the difference is that upgrades now happen on multiple chains. 
This is only a problem for shadow upgrades - +in this case, the chains have to tightly coordinate to make all the upgrades happen in a short time frame, as the +content of the upgrade becomes public once the first chain is upgraded. The actual upgrade process is as follows: + +1. Prepare Upgrade for all chains: + - The new facets and upgrade contracts have to be deployed, + - The upgrade’ calldata (diamondCut, initCalldata with ProposedUpgrade) is hashed on L1 and the hash is saved. +2. Upgrade specific chain + - The upgrade has to be called on the specific chain. The upgrade calldata is passed in as calldata and verified. The + protocol version is updated. + - Ideally, the upgrade will be very similar for all chains. If it is not, a smart contract can calculate the + differences. If this is also not possible, we have to set the `diamondCut` for each chain by hand. +3. Freeze not upgraded chains + - After a certain time the chains that are not upgraded are frozen. diff --git a/content/10.zk-stack/10.components/40.sequencer-server.md b/content/10.zk-stack/10.components/40.sequencer-server.md new file mode 100644 index 00000000..df296525 --- /dev/null +++ b/content/10.zk-stack/10.components/40.sequencer-server.md @@ -0,0 +1,51 @@ +--- +title: Sequencer / Server +description: Overview of the zkSync Sequencer, detailing its components and functionalities for monitoring and maintaining L1 and L2 operations. +--- + +The zkSync Sequencer is a complex system composed of several services and modules that work together to monitor Ethereum Layer 1 (L1), +maintain Layer 2 (L2) state, and manage the order of incoming transactions. +This system is critical for the operation and reliability of the zkSync protocol. + +For more detailed information about each module, visit the [zkSync Era GitHub repository](%%zk_git_repo_zksync-era%%). + +## RPC services + +RPC Services serve as the primary user interface for interacting with the zkSync server. 
They include: + +- **HttpApi**: + This is the HTTP public Web3 API that allows users to make requests via traditional HTTP methods. +- **WsApi**: + The WebSocket Web3 API supports real-time interactions and includes PubSub capabilities, enhancing user experience by providing faster, push-based updates. + +## ETH operator + +The ETH Operator module interfaces directly with L1, performing critical observations and transactions: + +- **EthWatcher**: + Monitors L1 for specific events such as deposits and system upgrades, ensuring the Sequencer remains synchronized with Ethereum's mainnet. +- **EthTxAggregator**: + Responsible for batching multiple L2 transactions and preparing them for L1 submission. + This includes crucial operations like committing blocks, proving blocks, and executing blocks. +- **EthTxManager**: + Handles the signing and dispatch of transactions to L1. + This module is also tasked with monitoring the transactions to address issues such as transaction failures or low gas prices + by resending transactions as necessary. + +## Sequencer + +The Sequencer module processes incoming transactions on zkSync, organizes them into blocks, +and ensures they comply with the constraints of the proving system. +It includes: + +- **Tree and TreeBackup**: + These components maintain a local instance of RocksDB that stores the complete L2 storage tree, + continuously updating to reflect the latest state root hash. +- **StateKeeper**: + Executes transactions and securely stores sealed blocks in the local RocksDB database, ensuring data integrity and state continuity. + +## Contract verification API + +This service handles requests for smart contract verification. +It validates contracts, ensures they meet specific standards, +and provides the necessary code and ABIs for verified contracts, fostering transparency and trust within the ecosystem. 
diff --git a/content/10.zk-stack/10.components/50.zksync-evm/00.index.md b/content/10.zk-stack/10.components/50.zksync-evm/00.index.md new file mode 100644 index 00000000..c5de01d3 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/00.index.md @@ -0,0 +1,54 @@ +--- +title: Overview +description: Learn about the zkSync VM's functionality, its role in the zkStack compared to the EVM in Ethereum, and how it handles smart contracts and transaction fees. +--- + +The zkSync VM (zero-knowledge Ethereum Virtual Machine) is an essential component of the ZK Stack, +designed to execute transactions similarly to the Ethereum Virtual Machine (EVM) but with a unique set of functionalities tailored to the needs of rollups. + +## zkSync VM and its role in ZK Stack + +Unlike the EVM that operates Ethereum's smart contracts directly, +the zkSync VM is specifically engineered to efficiently run the State Transition Function (STF), +which is vital for producing proofs of correct execution in a rollup context. +This STF is defined by the [Bootloader](/zk-stack/components/zksync-evm/bootloader), which implements and runs it. + +### Execution of transactions + +Transactions executed by the zkSync VM are primarily written in native zkSync VM bytecode, enabling straightforward execution. +In the future, the system will also accommodate EVM bytecode through an efficient interpreter built into the zkSync VM. + +--- +## Special features of zkSync VM + +The zkSync VM incorporates several specialized features to meet the demands of rollups, including: + +- **Storage and Gas Metering:** + Adaptations in storage handling and gas metering to suit the rollup model. +- **Precompiles and System Contracts:** + The zkSync VM supports predeployed contracts known as precompiles and system contracts. + While both are integral, system contracts have special permissions and are mainly invoked by the Bootloader, not by user transactions. 
+ These system contracts are crucial for managing specialized operations and are outlined in more detail [here](/zk-stack/components/smart-contracts/system-contracts). + +--- +## User-facing features + +To enhance user experience, the zkSync VM supports account abstraction, allowing users to customize how transaction fees are paid. +This flexibility is part of the zkStack's efforts to improve usability and accessibility. + +--- +## Fee model + +The zkSync VM's fee model differs significantly from Ethereum's due to the unique requirements of running a rollup. +It considers the various costs associated with rollup operations, including: + +- **Data and Proof Execution Costs on L1:** + Ensures that the rollup's data and proof computations are financially sustainable when settled on Ethereum. +- **Sequencer and Prover Costs:** + Covers expenses related to sequencing transactions and generating zero-knowledge proofs. + +--- +The zkEVM plays a critical role in the zkStack by ensuring efficient execution of transactions within the unique operational environment of rollups. +Its specialized features and innovative fee model are designed to maintain functionality and scalability while providing a seamless experience for users. +For further exploration of these concepts, consider reading more about +[account abstraction](/zk-stack/concepts/account-abstraction) and the [fee mechanism](/zk-stack/concepts/fee-mechanism) in our documentation. diff --git a/content/10.zk-stack/10.components/50.zksync-evm/10.bootloader.md b/content/10.zk-stack/10.components/50.zksync-evm/10.bootloader.md new file mode 100644 index 00000000..de9eab73 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/10.bootloader.md @@ -0,0 +1,343 @@ +--- +title: Bootloader +description: Overview of the bootloader in zkSync, which processes transactions in batch mode for efficiency, including its role and operational mechanics. 
+--- + +In standard Ethereum clients, the process of executing blocks involves selecting and validating transactions one by one, +executing them, and then applying the resulting state changes to the blockchain. +This method is suitable for Ethereum's architecture but would be inefficient +for zkSync due to the need for running a complete proving workflow for each transaction. + +### Why zkSync Uses a Bootloader + +To address this inefficiency, zkSync employs a bootloader. +This component allows for processing not just one transaction at a time but an entire batch of transactions as a single large operation. +This approach is similar to how an EntryPoint works under EIP4337, +which also manages transactions in arrays to support the Account Abstraction protocol. + +You can learn more about [Batches & L2 blocks on zkSync](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20%26%20L2%20blocks%20on%20zkSync.md). + +### Operational Mechanism of the Bootloader + +The bootloader's code is not stored on Layer 2 (L2) but its hash is stored on Layer 1 (L1) and can only be modified through a system upgrade. +This setup ensures that the bootloader functions as a kind of "formal" address that provides context and identity to `this`, `msg.sender`, +and similar references during transaction processing. +If someone interacts with this address, for instance, to handle transaction fees, it triggers the EmptyContract’s code. + +### Bootloader Variants: Playground vs Proved + +While the same bootloader implementation is used across both mainnet operations and for testing activities like emulating ethCalls, +only the **proved** bootloader is used for official batch processing. +This ensures reliability and security in batch-building on the mainnet. + +This section focuses on describing the **proved** bootloader. + +--- +## Batch Start + +The state of the bootloader is equivalent to the state of a contract transaction with empty calldata. 
+
+The only difference is that it starts with all the possible memory pre-allocated (to avoid costs for memory expansion).
+
+For additional efficiency, the bootloader receives its parameters inside its memory.
+This is the only point of non-determinism: the bootloader _starts with its memory pre-filled with any data the operator wants_.
+That’s why the operator is responsible for validating the correctness of the bootloader.
+The operator should never rely on the initial contents of the memory to be correct & valid.
+
+For instance, for each transaction, we check that it is
+[properly ABI-encoded](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3058)
+and that the transactions
+[go exactly one after another](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3736).
+We also ensure that transactions do not exceed the limits of the memory space allowed for transactions.
+
+## Transaction Types and Validation
+
+While the main transaction format is the internal [`Transaction`
+format](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/libraries/TransactionHelper.sol#L25),
+it is a struct that is used to represent various kinds of transaction types. It contains a lot of `reserved` fields
+that could be used depending on the future types of transactions, without the need for AA to change the interfaces of their
+contracts.
+
+The exact type of the transaction is marked by the `txType` field of the transaction type. There are 6 types currently
+supported:
+
+- `txType`: 0. It means that this transaction is of legacy transaction type. The following restrictions are enforced:
+- `maxFeePerErgs=getMaxPriorityFeePerErg` since it is pre-EIP-1559 tx type.
+- `reserved1..reserved4` as well as `paymaster` are 0. 
`paymasterInput` is zero.
+- Note, that unlike type 1 and type 2 transactions, `reserved0` field can be set to a non-zero value, denoting that this
+  legacy transaction is EIP-155-compatible and its RLP encoding (as well as signature) should contain the `chainId` of
+  the system.
+- `txType`: 1. It means that the transaction is of type 1, i.e. transactions with access lists. zkSync does not support access
+  lists in any way, so no benefits of fulfilling this list will be provided. The access list is assumed to be empty. The
+  same restrictions as for type 0 are enforced, but also `reserved0` must be 0.
+- `txType`: 2. It is EIP1559 transactions. The same restrictions as for type 1 apply, but now `maxFeePerErgs` may not be
+  equal to `getMaxPriorityFeePerErg`.
+- `txType`: 113. It is zkSync transaction type. This transaction type is intended for AA support. The only restriction
+  that applies to this transaction type: fields `reserved0..reserved4` must be equal to 0.
+- `txType`: 254. It is a transaction type that is used for upgrading the L2 system. This is the only type of transaction
+  that is allowed to start a transaction out of the name of the contracts in kernel space.
+- `txType`: 255. It is a transaction that comes from L1. There are almost no restrictions explicitly imposed upon this
+  type of transaction, since the bootloader at the end of its execution sends the rolling hash of the executed priority
+  transactions. The L1 contract ensures that the hash did indeed match the
+  [hashes of the priority transactions on L1](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/contracts/ethereum/contracts/zksync/facets/Executor.sol#L282).
+
+You can also read more on L1->L2 transactions and upgrade transactions
+[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md). 
+
+However, as already stated, the bootloader’s memory is not deterministic and the operator is free to put anything it
+wants there. For all of the transaction types above the restrictions are imposed in the following
+([method](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L2828)),
+which is called before starting to process the transaction.
+
+## Bootloader Memory Structure
+
+The bootloader expects the following structure of the memory (here by word we denote 32-bytes, the same machine word as
+on EVM):
+
+### Batch Information
+
+The first 8 words are reserved for the batch information provided by the operator.
+
+- `0` — The address of the operator (the beneficiary of the transactions).
+- `1` — The hash of the previous batch. Its validation will be explained later on.
+- `2` — The timestamp of the current batch. Its validation will be explained later on.
+- `3` — The number of the new batch.
+- `4` — The L1 gas price provided by the operator.
+- `5` — The “fair” price for L2 gas, i.e. the price below which the `baseFee` of the batch should not fall.
+  For now, it is provided by the operator, but in the future it may become hardcoded.
+- `6` — The base fee for the batch that is expected by the operator.
+  While the base fee is deterministic, it is still provided to the bootloader just to make sure that the data
+  that the operator has coincides with the data provided by the bootloader.
+- `7` — Reserved word. Unused on proved batch.
+
+The batch information slots
+[are used at the beginning of the batch](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3629).
+Once read, these slots can be used for temporary data.
+
+### Temporary Data Descriptions
+
+(This temporary data is used for debug and transaction processing purposes.)
+
+- `[8..39]` – reserved slots for debugging purposes
+- `[40..72]` – slots for holding the paymaster context data for the current transaction. The role of the paymaster
+  context is similar to the [EIP4337](https://eips.ethereum.org/EIPS/eip-4337)’s one. You can read more about it in the
+  account abstraction documentation.
+- `[73..74]` – slots for signed and explorer transaction hash of the currently processed L2 transaction.
+- `[75..110]` – 36 slots for the calldata for the KnownCodesContract call.
+- `[111..1134]` – 1024 slots for the refunds for the transactions.
+- `[1135..2158]` – 1024 slots for the overhead for batch for the transactions. This overhead is suggested by the
+  operator, i.e. the bootloader will still double-check that the operator does not overcharge the user.
+- `[2159..3182]` – slots for the “trusted” gas limits by the operator. The user’s transaction will have at its disposal
+  `min(MAX_TX_GAS(), trustedGasLimit)`, where `MAX_TX_GAS` is a constant guaranteed by the system. Currently, it is
+  equal to 80 million gas. In the future, this feature will be removed.
+- `[3183..7282]` – slots for storing L2 block info for each transaction. You can read more on the difference between L2 blocks
+  and batches
+  [here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20&%20L2%20blocks%20on%20zkSync.md).
+- `[7283..40050]` – slots used for compressed bytecodes, each in the following format:
+  - 32 bytecode hash
+  - 32 zeroes (but then it will be modified by the bootloader to contain 28 zeroes and then the 4-byte selector of the
+    `publishCompressedBytecode` function of the `BytecodeCompressor`)
+  - The calldata to the bytecode compressor (without the selector).
+- `[40051..40052]` – slots where the hash and the number of current priority ops is stored. 
More on it in the priority
+  operations
+  [section](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md).
+
+### L1Messenger Pubdata
+
+- `[40053..248052]` – slots where the final batch pubdata is supplied to be verified by the L1Messenger. More on how the
+  L1Messenger system contract handles the pubdata can be read
+  [here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md).
+
+This `[40053..248052]` space is used for the calldata to the L1Messenger’s `publishPubdataAndClearState` function, which
+accepts:
+
+- List of the user L2→L1 logs,
+- Published L2→L1 messages
+- Bytecodes
+- List of full state diff entries, which describe how each storage slot has changed as well as compressed state diffs.
+
+This method will then check the correctness of the provided data and publish the hash of the correct pubdata to L1.
+
+Note, that while the realistic amount of pubdata that can be published in a batch is 120kb, the size of the calldata to
+L1Messenger may be significantly larger due to the fact that this method also accepts the original, uncompressed state diff
+entries.
+
+These will not be published to L1, but will be used to verify the correctness of the compression.
+In a worst-case scenario, the number of bytes that may be needed for this scratch space is if all the pubdata consists of repeated writes
+(i.e. we’ll need only 4 bytes to include key) that turn into 0 (i.e. they’ll need only 1 byte to describe it).
+
+However, each of these writes in the uncompressed form will be represented as a 272 byte state diff entry and so we get that the number
+of diffs is `120k / 5 = 24k`. This means that we will have to accommodate `24k * 272 = 6528000` bytes of calldata for the
+uncompressed state diffs. Adding 120k on top leaves us with roughly `6650000` bytes needed for calldata. 
`207813` slots +are needed to accommodate this amount of data. We round up to `208000` slots to give space for constant-size factors for +ABI-encoding, like offsets, lengths, etc. + +In theory, much more calldata could be used. +For instance, if one byte is used for `enum` index. +It is the responsibility of the operator to ensure that it can form the correct calldata for the L1Messenger. + +### Transaction Slot Descriptions + +- `[248053..250100]` words — 2048 slots for 1024 transaction’s meta descriptions (their structure is explained below). + +For internal reasons related to possible future integrations of zero-knowledge proofs about some of the contents of the +bootloader’s memory, the array of the transactions is not passed as the ABI-encoding of the array of transactions, but: + +- We have a constant maximum number of transactions. At the time of this writing, this number is 1024. +- Then, we have 1024 transaction descriptions, each ABI encoded as the following struct: + +```solidity +struct BootloaderTxDescription { + // The offset by which the ABI-encoded transaction's data is stored + uint256 txDataOffset; + // Auxiliary data on the transaction's execution. In our internal versions + // of the bootloader it may have some special meaning, but for the + // bootloader used on the mainnet it has only one meaning: whether to execute + // the transaction. If 0, no more transactions should be executed. If 1, then + // we should execute this transaction and possibly try to execute the next one. + uint256 txExecutionMeta; +} + +``` + +### **Reserved slots for the calldata for the paymaster’s postOp operation** + +- `[252149..252188]` words — 40 slots which could be used for encoding the calls for postOp methods of the paymaster. 
+
+To avoid additional copying of transactions for calls for the account abstraction, we reserve some of the slots which
+could then be used to form the calldata for the `postOp` call for the account abstraction without having to copy the
+entire transaction’s data.
+
+### **The actual transaction’s descriptions**
+
+- `[252189..523261]`
+
+Starting from the 487312 word, the actual descriptions of the transactions start. (The struct can be found by this
+[link](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/contracts/libraries/TransactionHelper.sol#L25)).
+The bootloader enforces that:
+
+- They are correctly ABI encoded representations of the struct above.
+- They are located without any gaps in memory (the first transaction starts at word 653 and each transaction goes right
+  after the next one).
+- The contents of the currently processed transaction (and the ones that will be processed later on) are untouched.
+  Note, that we do allow overriding data from the already processed transactions as it helps to preserve efficiency by
+  not having to copy the contents of the `Transaction` each time we need to encode a call to the account.
+
+### **VM Hook Pointers**
+
+- `[523261..523263]`
+
+These are memory slots that are used purely for debugging purposes (when the VM writes to these slots, the server side
+can catch these calls and give important insight information for debugging issues).
+
+### **Result Pointer**
+
+- `[523264..524287]`
+
+These are memory slots that are used to track the success status of a transaction. If the transaction with number `i`
+succeeded, the slot `2^19 - 1024 + i` will be marked as 1 and 0 otherwise.
+
+## Bootloader Execution Flow
+
+1. 
At the start of the batch it + [reads the initial batch information](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3629) + and + [sends the information](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3674) + about the current batch to the `SystemContext` system contract. +2. It goes through each of + [transaction’s descriptions](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3715) + and checks whether the `execute` field is set. If not, it ends processing of the transactions and ends execution of + the batch. If the execute field is non-zero, the transaction will be executed and it goes to step 3. +3. Based on the transaction’s type it decides whether the transaction is an L1 or L2 transaction and processes them + accordingly. More on the processing of the L1 transactions can be read [here](#l1-l2-transactions). More on L2 + transactions can be read [here](#l2-transactions). + +## L2 Transactions + +On zkSync, every address is a contract. Users can start transactions from their EOA accounts, because every address that +does not have any contract deployed on it implicitly contains the code defined in the +[DefaultAccount.sol](https://github.com/code-423n4/2023-10-zksync/blob/main/code/system-contracts/contracts/DefaultAccount.sol) +file. Whenever anyone calls a contract that is not in kernel space (i.e. the address is ≥ 2^16) and does not have any +contract code deployed on it, the code for `DefaultAccount` will be used as the contract’s code. + +Note, that if you call an account that is in kernel space and does not have any code deployed there, right now, the +transaction will revert. 
+ +We process the L2 transactions according to our account abstraction protocol: +[https://v2-docs.zksync.io/dev/tutorials/custom-aa-tutorial.html#prerequisite](https://v2-docs.zksync.io/dev/tutorials/custom-aa-tutorial.html#prerequisite). + +1. We [deduct](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1073) + the transaction’s upfront payment for the overhead for the block’s processing. + You can read more on how that works in the fee model [description](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/zkSync%20fee%20model.md). +2. Then we calculate the gasPrice for these transactions according to the EIP1559 rules. +3. We [conduct the validation step](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1180) + of the AA protocol: + +- We calculate the hash of the transaction. +- If enough gas has been provided, we near_call the validation function in the bootloader. It sets the tx.origin to the + address of the bootloader, sets the ergsPrice. It also marks the factory dependencies provided by the transaction as + marked and then invokes the validation method of the account and verifies the returned magic. +- Calls the accounts and, if needed, the paymaster to receive the payment for the transaction. Note, that accounts may + not use `block.baseFee` context variable, so they have no way to know what exact sum to pay. That’s why the accounts + typically firstly send `tx.maxFeePerErg * tx.ergsLimit` and the bootloader + [refunds](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L730) + for any excess funds sent. + +1. 
[We perform the execution of the transaction](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1234).
   Note that if the sender is an EOA, tx.origin is set equal to the `from` value of the transaction. During the
   execution of the transaction, the publishing of the compressed bytecodes happens: for each factory dependency if it
   has not been published yet and its hash is currently pointed to in the compressed bytecodes area of the bootloader, a
   call to the bytecode compressor is done. Also, at the end the call to the KnownCodeStorage is done to ensure all the
   bytecodes have indeed been published.
2. We
   [refund](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1401)
   the user for any excess funds they spent on the transaction. The process is as follows:

3. The `postTransaction` operation is called to the `paymaster`.
4. The Bootloader asks the operator to provide a refund.
   During the first VM run—without proofs—the operator directly inserts the refunds in the memory of the bootloader.
   During the run for the proved batches, the operator already knows which values have to be inserted.
   You can read more about the [fee model here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/zkSync%20fee%20model.md)
5. The Bootloader refunds the user.
6. We notify the operator about the
   [refund that was granted to the user](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L1112).
   It will be used to correctly display the gas used for the transaction in the block explorer.

## L1->L2 Transactions

L1->L2 transactions are transactions that were initiated on L1. We assume that `from` has already authorized the L1→L2
transactions. 
It also has its L1 pubdata price as well as ergsPrice set on L1. + +Most of the steps from the execution of L2 transactions are omitted and we set `tx.origin` to the `from`, and +`ergsPrice` to the one provided by transaction. +After that, we use [mimicCall](https://matter-labs.github.io/eravm-spec/spec.html#FarCalls) +to provide the operation itself from the name of the sender account. + +Note, that for L1→L2 transactions, `reserved0` field denotes the amount of ETH that should be minted on L2 as a result +of this transaction. `reserved1` is the refund receiver address, i.e. the address that would receive the refund for the +transaction as well as the msg.value if the transaction fails. + +There are two kinds of L1->L2 transactions: + +1. Priority operations, initiated by users (they have type `255`). +2. Upgrade transactions, that can be initiated during system upgrade (they have type `254`). + +[You can read more about differences between the different L1->L2 transaction types here.](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md) + +## End of the Batch + +At the end of the batch we set `tx.origin` and `tx.gasprice` context variables to zero to both save L1 gas on `calldata` and to +send the entire Bootloader balance to the operator. This effectively sends all the fees collected by the Bootloader to the operator. + +Also, we +[set](https://github.com/code-423n4/2023-10-zksync/blob/ef99273a8fdb19f5912ca38ba46d6bd02071363d/code/system-contracts/bootloader/bootloader.yul#L3812) +the fictive L2 block’s data. Then, we call the system context to ensure that it publishes the timestamp of the L2 block +as well as L1 batch. We also reset the `txNumberInBlock` counter to avoid its state diffs from being published on L1. 
+You can read more about block processing on zkSync +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Batches%20&%20L2%20blocks%20on%20zkSync.md). + +After that, we publish the hash as well as the number of priority operations in this batch. More on it +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20L1%E2%86%92L2%20ops%20on%20zkSync.md). + +Then, we call the L1Messenger system contract for it to compose the pubdata to be published on L1. You can read more +about the pubdata processing +[here](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Smart%20contract%20Section/Handling%20pubdata%20in%20Boojum.md). diff --git a/content/10.zk-stack/10.components/50.zksync-evm/20.precompiles.md b/content/10.zk-stack/10.components/50.zksync-evm/20.precompiles.md new file mode 100644 index 00000000..0797d7c3 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/20.precompiles.md @@ -0,0 +1,198 @@ +--- +title: Precompiles +description: Explanation of precompiled contracts for elliptic curve operations essential for zkSNARK verification. +--- + +## Overview of Precompiled Contracts in Ethereum + +In Ethereum, precompiled contracts offer advanced functionalities not readily available in the standard set of opcodes. +These contracts simplify and optimize resource-intensive operations, such as those involving elliptic curves, which are crucial for zkSNARK verification. + +### Role of Precompiled Contracts + +Precompiled contracts are embedded within the Ethereum Virtual Machine (EVM) at predetermined addresses. +They facilitate operations like elliptic curve point addition, scalar multiplication, +and pairing—each essential for cryptographic processes in blockchain applications. 
+

### Understanding Elliptic Curve Operations

Elliptic curve operations are fundamental to blockchain cryptography, providing the security necessary for transactions and smart contracts.
These operations include:

- **Elliptic Curve Point Addition:** Combines two points on a curve to produce a third point.
- **Elliptic Curve Point Scalar Multiplication:** Multiplies a point on a curve by a scalar, producing another point.
- **Elliptic Curve Pairing:** A complex operation used in more advanced cryptographic constructs, such as zkSNARKs.

The precompiled contracts designated for these tasks are essential for efficient and secure cryptographic verification.

### How Precompiled Contracts Work

These contracts are hardcoded into the EVM and can be accessed via fixed addresses starting from `1`.
Each contract comes with a set gas cost for execution, excluding the gas required for calling the contract and managing data in memory.
For instance, in the Go-Ethereum implementation, the execution logic and gas calculations are defined within the Go programming language,
following specifications like [EIP-196](https://eips.ethereum.org/EIPS/eip-196).

### Evolving with Ethereum

The Ethereum network occasionally introduces new precompiled contracts through network upgrades or hard forks.
These additions ensure that Ethereum can handle new cryptographic challenges and efficiency demands as they arise.

Precompiled contracts thus play a crucial role in maintaining Ethereum's cryptographic integrity and performance,
especially in areas requiring intense computational resources, like those needed for zkSNARK verification.

---
## Field Arithmetic

The BN254 (also known as alt-BN128) is an elliptic curve defined by the equation $y^2 = x^3 + 3$ over the finite field
$\mathbb{F}_p$, being $p = 21888242871839275222246405745257275088696311157297823662689037894645226208583$. 
The modulus
is less than 256 bits, which is why every element in the field is represented as a `uint256`.

The arithmetic is carried out with the field elements encoded in the Montgomery form. This is done not only because
operating in the Montgomery form speeds up the computation but also because the native modular multiplication, which is
carried out by Yul's `mulmod` opcode, is very inefficient.

Instruction sets on zkSync and EVM are different, so the same Yul/Solidity code can be efficient on
EVM, but not on zkEVM, and vice versa.

One such very inefficient command is `mulmod`. On EVM there is a native opcode that makes modulo multiplication and it
costs only 8 gas, which compared to the other opcodes costs is only 2-3 times more expensive. On zkEVM we don’t have
native `mulmod` opcode, instead, the compiler does full-width multiplication (e.g. it multiplies two `uint256`s and gets
as a result an `uint512`). Then the compiler performs long division for reduction (but only the remainder is kept), in
the generic form it is an expensive operation and costs many opcode executions, which can’t be compared to the cost of
one opcode execution. The worst thing is that `mulmod` is used a lot for the modulo inversion, so optimizing this one
opcode gives a huge benefit to the precompiles.

### Multiplication

As said before, multiplication was carried out by implementing the Montgomery reduction, which works with general moduli
and provides a significant speedup compared to the naïve approach.

The squaring operation is obtained by multiplying a number by itself. However, this operation can have an additional
speedup by implementing the SOS Montgomery squaring.

### Inversion

Inversion was performed using the extended binary Euclidean algorithm (also known as extended binary greatest common
divisor). 
This algorithm is a modification of Algorithm 3 `MontInvbEEA` from +[Montgomery inversion](https://cetinkayakoc.net/docs/j82.pdf). + +### Exponentiation + +The exponentiation was carried out using the square and multiply algorithm, which is a standard technique for this +operation. + +--- +## Montgomery Form + +Let’s take a number `R`, such that `gcd(N, R) == 1` and `R` is a number by which we can efficiently divide and take +module over it (for example power of two or better machine word, aka 2^256). Then transform every number to the form of +`x * R mod N` / `y * R mod N` and then we get efficient modulo addition and multiplication. The only thing is that +before working with numbers we need to transform them to the form from `x mod N` to the `x * R mod N` and after +performing operations transform the form back. + +For the latter, we will assume that `N` is the module that we use in computations, and `R` is $2^{256}$, since we can +efficiently divide and take module over this number and it practically satisfies the property of `gcd(N, R) == 1`. + +### Montgomery Reduction Algorithm (REDC) + +> Reference: <https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm> + +```solidity +/// @notice Implementation of the Montgomery reduction algorithm (a.k.a. REDC). +/// @dev See <https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm> +/// @param lowestHalfOfT The lowest half of the value T. +/// @param higherHalfOfT The higher half of the value T. +/// @return S The result of the Montgomery reduction. 
+
function REDC(lowestHalfOfT, higherHalfOfT) -> S {
    let q := mul(lowestHalfOfT, N_PRIME())
    let aHi := add(higherHalfOfT, getHighestHalfOfMultiplication(q, P()))
    let aLo, overflowed := overflowingAdd(lowestHalfOfT, mul(q, P()))
    if overflowed {
        aHi := add(aHi, 1)
    }
    S := aHi
    if iszero(lt(aHi, P())) {
        S := sub(aHi, P())
    }
}

```

By choosing $R = 2^{256}$ we avoided 2 modulo operations and one division from the original algorithm. This is because
in Yul, native numbers are uint256 and the modulo operation is native, but for the division, as we work with a 512-bit
number split into two parts (high and low part) dividing by $R$ means shifting 256 bits to the right or what is the
same, discarding the low part.

### Montgomery Addition/Subtraction

Addition and subtraction in Montgomery form are the same as ordinary modular addition and subtraction because of the
distributive law

$$
\begin{align*}
aR+bR=(a+b)R,\\
aR-bR=(a-b)R.
\end{align*}
$$

```solidity
/// @notice Computes the Montgomery addition.
/// @dev See <https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm> for further details on the Montgomery multiplication.
/// @param augend The augend in Montgomery form.
/// @param addend The addend in Montgomery form.
/// @return ret The result of the Montgomery addition.
function montgomeryAdd(augend, addend) -> ret {
    ret := add(augend, addend)
    if iszero(lt(ret, P())) {
        ret := sub(ret, P())
    }
}

/// @notice Computes the Montgomery subtraction.
/// @dev See <https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm> for further details on the Montgomery multiplication.
/// @param minuend The minuend in Montgomery form.
/// @param subtrahend The subtrahend in Montgomery form.
/// @return ret The result of the Montgomery subtraction. 
+
function montgomerySub(minuend, subtrahend) -> ret {
    ret := montgomeryAdd(minuend, sub(P(), subtrahend))
}

```

We do not use `addmod`. That's because in most cases the sum does not exceed the modulus.

### Montgomery Multiplication

The product of $aR \mod N$ and $bR \mod N$ is $REDC((aR \mod N)(bR \mod N))$.

```solidity
/// @notice Computes the Montgomery multiplication using the Montgomery reduction algorithm (REDC).
/// @dev See <https://en.wikipedia.org/wiki/Montgomery_modular_multiplication#The_REDC_algorithm> for further details on the Montgomery multiplication.
/// @param multiplicand The multiplicand in Montgomery form.
/// @param multiplier The multiplier in Montgomery form.
/// @return ret The result of the Montgomery multiplication.
function montgomeryMul(multiplicand, multiplier) -> ret {
    let hi := getHighestHalfOfMultiplication(multiplicand, multiplier)
    let lo := mul(multiplicand, multiplier)
    ret := REDC(lo, hi)
}

```

### Montgomery Inversion

```solidity
/// @notice Computes the Montgomery modular inverse skipping the Montgomery reduction step.
/// @dev The Montgomery reduction step is skipped because a modification in the binary extended Euclidean algorithm is used to compute the modular inverse.
/// @dev See the function `binaryExtendedEuclideanAlgorithm` for further details.
/// @param a The field element in Montgomery form to compute the modular inverse of.
/// @return invmod The result of the Montgomery modular inverse (in Montgomery form).
function montgomeryModularInverse(a) -> invmod {
    invmod := binaryExtendedEuclideanAlgorithm(a)
}
```

As said before, we use a modified version of the bEE algorithm that lets us “skip” the Montgomery reduction step.

The regular algorithm would be $REDC((aR \mod N)^{−1}(R^3 \mod N))$ which involves a regular inversion plus a
multiplication by a value that can be precomputed. 
diff --git a/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/10.index.md b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/10.index.md new file mode 100644 index 00000000..fd535612 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/10.index.md @@ -0,0 +1,643 @@ +--- +title: zkSync Virtual Machine primer +description: +--- + +Unlike EVM, zkEVM is a register machine. EVM instructions operate on a stack. Instead, zkEVM operates primarily on +sixteen registers and memory like most modern computers. That simplifies zero-knowledge proofs, which largely rely on +building arithmetic circuits. + +This document describes zkEVM assembly language, then the aspects of VM related to smart-contracts. Its purpose is not +to be a complete reference, but to guide you through the main ideas. + +## VM architecture + +The native type for zkEVM is a 256-bits wide unsigned integer, we call it a _word_. + +Contracts are sequences of instructions. To support the execution of contracts, VM provides the following transient +state: + +- **registers**: 16 general-purpose registers: `r0`, `r1`, …, `r15`. + `r0` is a special constant register: reading it yields 0, storing to it is ignored. +- **flags**: three distinct boolean registers LT (less-than), EQ (equals, the result is zero) and GT (greater-than). + Instructions may set or clear flags depending on computation results. +- **data** **stack**: holds $2^{16}$ words, is free to use. +- **heap**: for data that we want to pass around between functions and contracts. Heap is bounded, accesses are only + free inside the bound, and we have to pay for growing the bound. +- **code memory**: stores code of currently running contracts. May also be used as a constant pool. + +VM is aware of two data types: + +- raw integers +- pointers (to fragments of other contracts’ heaps). 
+ +Registers and data stack are tagged: VM keeps track of whether they hold pointers or raw integer values. Some +instructions will only accept operands tagged as pointers. + +Heap and storage are not tagged, so if we store a pointer to the heap, its tag is lost. + +Contracts have key-value storages, where keys and values are untagged 256-bit integers. Instructions can change +persistent contract storage. + +VM is capable of both near calls (to the code within the same contract) and far calls (to other contracts). + +Let us now gradually introduce the VM functionality guided by the instruction set. + +## Basic instructions + +Contract code consists of instructions, they are executed sequentially. + +Instructions usually operate with registers. For example, an instruction `add` may look like that: + +```asm +; this is a comment +add 5, r2, r8 ; store (5 + r2) to r8 +``` + +Or like that: + +```asm +add 5, r0, r8 ; store (5 + 0) to r8 +``` + +Notice that register `r0` is used to feed constant zero values to instructions; this allows to use `add X, r0, Y` to +copy a value `X` to `Y` . + +Commonly, instructions accept two inputs and one output operands, following the schema: + +![arithmetic opcode](/images/zk-stack/arithmetic_opcode.png) + +The first operand can be taken from: + +- registers +- an immediate 16-bit value, like in the example above `add 5, r2, r8`. To use bigger numbers put them as constants in + the code memory, see section **Code Addressing**. +- directly from the code memory +- stack in various ways, e.g. `add stack=[2], r2, r8` takes the first element from the stack memory area, by an absolute + address 2. +- code memory + +Only registers can be the source of the second operand. + +```asm +add r0, 5, r8 ; error: 5 is an immediate value, + ; but only register is allowed as second operand +``` + +There is usually at most one output operand. Similarly, the first output operand can be stored to registers or stack. 
If
there is a second output operand, it can only be stored to a register.

Instructions are executed one after another, and every instruction has a gas cost measured in _gas_. A program that runs
out of gas panics and none of its side effects are performed.

Every contract may have at most $2^{16}$ instructions.

### Arithmetic instructions

Besides `add`, zkEVM implements `sub` for subtraction, `and`/ `or` / `xor` for bitwise logics, `shl`/ `shr` for logical
shifts, `rol`/ `ror` for circular shifts. These instructions follow the same format, e.g.:

```asm
shl r1, r4, r3 ; shift r1 left by the value of r4, store result in r3
```

Instructions `mul` and `div` are particular: they have two output operands:

- `mul r1, r2, r3, r4` stores the low 256 bits of `r1 * r2` in r3, high 256 bits of `r1 * r2` in r4
- `div r1, r2, r3, r4` stores the quotient in `r3` and remainder in `r4`.

### Modifiers

Most instructions support modifiers that alter their behaviour. The modifiers are appended to the name of the
instruction, separated by a dot e.g. `sub.s` . Three basic modifier types are: `set_flags` , predicates, and `swap`.

#### Set flags

By default, most instructions preserve flags.

```asm
sub r1, r2, r3 ; r3 <- (r1 - r2), no flags are affected
```

The instruction `sub` is implemented so that it sets `EQ` if the result is zero (that is, if `r1` == `r2`). But in this
case, even if `r1-r2` is zero, the EQ flag is not set, because we did not allow it explicitly. We allow instruction to
set flags by appending a “set flags” modifier to them, like that:

```asm
sub! r1, r2, r3 ; r3 <- (r1 - r2); EQ = 1
```

Most instructions with “set flags” modifier set the flags as follows:

- `EQ` - if result is zero
- `LT` - if overflow occurs (result is "less" than zero)
- `GT` - if not `EQ` and not `LT` (result is "greater" than zero)

Note that the details of the behavior may vary depending on which instruction is used. 
+ +You can learn more in the +[formal specification](/zk-stack/components/zksync-evm/vm-specification/formal-spec). + +#### Predicates + +Another type of modifiers allows transforming any instruction into a _predicated_, conditional instruction. Predicated +instructions are only executed if flags satisfy their condition. + +Recall the three flags: LT, EQ and GT. + +For example, this `sub` instruction is only executed if EQ is set: + +```asm +sub.eq r1, r2, r5 +``` + +Here is how we can execute `jump` to a label `.label_if_equals` only if `r1 == r2` : + +```asm +sub! r1, r2, r3 ; r3 <- (r1 - r2); EQ = 1 if r1 == r2 +jump.eq .label_if_equals +``` + +If the condition is not satisfied, we skip the instruction, but still pay its basic cost in gas. + +Here is a full list of available predicates: + +- `gt` +- `eq` +- `lt` +- `ge` (short for “GT or EQ”) +- `le` (short for “LT or EQ”) +- `ne` (short for "not EQ") +- `gtlt` (short for "GT" or "LT") +- `of` (synonym for "LT") + +You can learn more in the +[formal specification](/zk-stack/components/zksync-evm/vm-specification/formal-spec). + +#### Swap + +Recall that instructions may only accept data from stack as their first operand. What if we need the second operand from +stack? For commutative operation, like `add` , `mul`, or `and`, the order of operands does not matter and we can just +write `add x,y,z` instead of `add y,x,z`. However, for operations like `sub` or `div` we implement a special “swap” +modifier which exchanges the operand values before executing the instruction. This is useful to work around the +restriction that the second source operand has to be a register. + +For example: + +```asm +sub r1, r2, r3 ; r3 <- r1 - r2 +sub.s r1, r2, r3 ; r3 <- r2 - r1 + +``` + +Finally, here is an example of an instruction adorned with all possible modifiers: + +```asm +sub.s.lt! 
r8, r4, r12 +``` + +Here is a breakdown of modifiers: + +- `.lt` : is only executed if the LT flag is set +- `.s` : computes `r4 - r8` instead of `r8 - r4` +- `!` : sets flags + +$$ +\begin{aligned} +LT &\leftarrow r_4 < r_8 \\ +EQ &\leftarrow r_4 - r_8 = 0 \\ +GT &\leftarrow r_4 > r_8 +\end{aligned} +$$ + +Other modifiers are instruction-specific. They are described in full in the instruction reference. + +## Calls and returns + +The `jump` instruction allows to continue execution from a different place, but it does not allow to return back. An +alternative is using calls; zkEVM supports calling code inside the contract itself (near calls) as well as calling other +contracts (far calls). + +### Far calls + +Far calls are the equivalent of calls in EVM. + +Each call gets its own stack, heap, code memories, and allocated gas. + +It is impossible to allocate more than 63/64 of the currently available gas to a far call. + +Calls can revert or panic (on executing an illegal instruction for example), which undoes all the changes to storage, transient storage and +events emitted during the call, and returns unspent gas to the caller. + +Suppose we far called a contract $C$. After the execution of $C$, the register `r1` holds a pointer to the return value, +allowing a read-only access to a fragment of $C$’s heap. Alternatively, `r1` can hold a pointer to the heap of some +other contract that $C$ called internally. More on that in Pointers section. + +**Delegate calls.** Beside normal `far_call`, there is a variant `far_call.delegate`. Delegate calls are a variation of +far calls allowing to call a contract with the current storage space. + +For example, suppose we have contracts A,B,C. Contract A calls B normally, then B delegates to C. Then C’s code is +executed in a context of B’s storage, as if contract A called contract C. If C returns normally, the execution will +proceed from the next instruction of B after delegate call. 
In case of `revert` or `panic` in C, all the usual rules +apply. + +**Mimic calls.** The last variant of far calls is `far_call.mimic`; it is inaccessible to users and only allowed in +system contracts. + +Any of far call variants can be additionally marked as `.static` to call a contract in static mode — see section +**Static Mode**. + +### Return, revert, panic + +There are three types of situations where control returns to the caller: + +- Return: a normal way of returning to the caller when no errors occurred. The instruction is `ret`. +- Revert: a recoverable error happened. Unspent gas is returned to the caller, which will execute the exception handler. + The instruction is `revert`. +- Panic: an irrecoverable error happened. Same as revert, but `LT` flag is set. The instruction is `ret.panic`. + +### Near calls + +Instruction `near_call reg, address` passes the control to a different address inside the same contract, like `jump`. +Additionally, it remembers the context of execution in a special _call stack_ (it is different from data stack and not +accessible to assembly programmers). + +Here is an example of calling function `f` . + +```asm +.text + +; here will be the code of exception handler +eh: + +; caller function +main: +near_call r2, @f, @eh ; refer to labels in code using '@' symbol + +; callee function +f: +ret + +``` + +Additional two arguments: + +- label `@eh` is the address of exception handler. Functions, like contracts, may revert or panic, which leads to the + execution of the exception handler. +- register `r2` holds how much gas we allocate to the function. + +As we see, zkEVM supports allocating ergs not only for far calls, but also for near calls. Passing zero will allocate +all available gas. Unlike in far calls, near calls do not limit the amount of gas passed to 63/64 of available gas. + +All near calls inside the contract are sharing the same memory space (heap, stack), and do not roll back the changes to +this memory if they fail. 
They do, however, roll back the changes to storage, transient storage and events.

Near calls cannot be used from Solidity to their full extent. Compiler generates them, but makes sure that if functions
revert or panic, the whole contract reverts or panics. Explicit exception handlers and allocating just a portion of
available gas are reserved for low-level code.

## Accessing data outside registers

### Stack addressing

As we already know, instructions may accept data not only in registers or as immediate 16-bit values, but also on stack.

Data stack is a collection of $2^{16}$ words with a pointer SP. This pointer contains the next address after the topmost
stack element, so the topmost element has the address SP-1. Stack grows towards maximal address, i.e. pushing an element
to stack increases SP.

On far call, SP starts in a new stack memory at 1024.

#### Reading from stack

There are several ways of accessing stack cells:

```asm
.text
main:

; r2 <- stack word by absolute index (r1+42), unrelated to SP
add stack=[r1+42], r0, r2

; r2 <- stack word by index (SP - (r1 + 42))
add stack[r1+42], r0, r2

; r2 <- stack word by index (SP - (r1 + 42)); additionally, SP -= (r1+42)
add stack-=[r1+42], r0, r2
```

As we see there are three stack address modes for input operands; all of them use (register + offset). 
+ +Currently, the last mode is only used in a `nop` instruction as a way to rewind stack: + +```asm +; effectively, SP -= reg+imm +nop stack-=[reg+imm] +``` + +#### Writing to stack + +Storing results on stack is also possible: + +```asm +.text +main: + +; r1 -> word by absolute index (r2 + 42) +add r1, r0, stack=[r2 + 42] + +; r1 -> word by absolute index SP - (r2 + 42) +add r1, r0, stack[r2 + 42] + +; r1 -> word by absolute index SP + (r2 + 42) +; additionally, SP += r2 + 42 +add r1, r0, stack+=[r2 + 42] +``` + +Currently, the last mode is only used in a `nop` instruction as a way to forward stack pointer: + +```asm +; effectively, SP += reg+imm +nop r0, r0, stack+=[reg+imm] +``` + +### Code addressing + +Sometimes we might need to work with larger chunks that do not fit into 16-bit. In this case we can use the +(read-only) code memory as a constant pool and read 256-bit constants from there. + +```asm +.rodata + +datavar: + .cell 42 + .cell 999 +.text +somelabel: + +; r2 <- word by index (r0+0) code memory +add @datavar[0], r0, r2 +add @datavar[r2], r0, r2 +``` + +Note: instructions are 64-bit wide, but when accessing data in code memory, this memory is treated as word-addressable. +Therefore, e.g. reading the 0-th 256-bit word from this memory will yield a binary representation of the four first +64-bit instructions in the contract. + +There is no distinction between static data and code: code can be read, data can be executed, but instructions that are +not correctly encoded will trigger panic. + +Contracts always need to be divisible by 32 bytes (4 instructions) because of this addressing mode. + +### Using heap + +Heap is a bounded memory region to store data between near calls, and to communicate data between contracts. + +#### Heap boundary growth + +Accessing an address beyond the heap bound leads to heap growth: the bound is adjusted to accommodate this address. The +difference between old and new bounds is paid in gas. 
+ +#### Instructions to access heap + +Most instructions can not use heap directly. Instructions `ld.1` and `st.1` are used to load and store data on heap: + +```asm +; take a 32-bit number from r1, use it as an offset in heap, +; load the word from heap by this offset to r4 +ld.1 r1, r4 + +; take a 32-bit number from r3, use it as an offset in heap, +; store the word from r5 to heap by this offset +st.1 r3, r5 +``` + +Heap is byte-addressable, but reads and writes operate in words. To read two consecutive words in heap starting at an +address A, first, read from A, and then read from A+32. Reading any addresses in between is valid too. + +One of the modifiers allows to immediately form a new offset like that: + +```asm +; same as ld, but additionally r5 <- r1 + 32 +ld.1.inc r1, r4, r5 +``` + +This allows reading several consecutive words in a row: + +```asm +; reads four consecutive words from heap starting at address in r8 +; into registers r1, r2, r3, r4 +ld.1.inc r8, r1, r8 +ld.1.inc r8, r2, r8 +ld.1.inc r8, r3, r8 +ld.1.inc r8, r4, r8 +``` + +In theory, heap can hold nearly $2^{32}$ bytes, but growing a heap so large is not affordable: the maximum gas allocated +is $2^{32}-1$. + +The topmost 32 bytes of heap are considered forbidden addresses, trying to access them results in panic no matter how +much gas is available. + +#### Heap and Auxheap + +In zkEVM, there are two heaps; every far call allocates memory for both of them. + +Heaps are selected with modifiers `.1` or `.2` : + +- `ld.1` reads from heap; +- `ld.2` reads from auxheap. + +The reason why we need two heaps is technical. Heap contains calldata and returndata for calls to user contracts, while +auxheap contains calldata and returndata for calls to system contracts. This ensures better compatibility with EVM as +users should be able to call zkEVM-specific system contracts without them affecting calldata or returndata. 
+

## Fat pointers

A fat pointer is the second type of values in zkEVM, beside raw integers.

As we noted, registers and stacks are internally tagged by VM to keep track of the cells containing pointers in their
low 128 bits. Only cells with a set pointer tag are considered fat pointers.

Fat pointers are used to pass read-only data between contracts. When choosing how to pass data to a contract (whether
when calling or returning from a call) we have a choice:

- pass an existing fat pointer, or
- create a new fat pointer from a fragment of heap/auxheap.

Fat pointers combine two aspects:

- Delimit a fragment accessible to other contract. Accesses outside this fragment through a pointer yield zero.
- Provide an offset inside this fragment. This offset can be increased or decreased.

The restrictions on fat pointers allow passing data between contracts safely and without excessive copying.

**Implementation note.** Internally, fat pointers hold four 32-bit values:

- bits 0..31 : offset
- bits 32..63: internal memory page ID
- bits 64…95 : starting address of the fragment
- bits 96…127 : length of the fragment

#### Instructions to manipulate fat pointers

Only special instructions can manipulate fat pointers without automatically clearing its pointer tag.

- `ptr.add`, `ptr.sub` modify the offset inside pointer
- `ptr.shrink` reduces the associated fragment, so if we get a fat pointer from contract A, we can then shrink it and
  pass to another contract B up the call chain, again without copying data.
- `ptr.pack` allows putting data in the top 128 bit of the pointer value without clearing the pointer tag.

Doing e.g. `add r1, 0, r1` on a pointer in `r1` clears its tag, and it is now considered as a raw integer.

Instructions `ld` and `ld.inc` (without indices 1 or 2) allow loading data by fat pointers, possibly
incrementing the pointer. It is impossible to write by a fat pointer. 
+
+## Contracts and storage
+
+All accounts are associated with contracts. There are $2^{160}$ valid account addresses.
+
+In zkEVM, contracts may have multiple **functions** in them; a contract may execute its functions by using `near_call`;
+it may call other contracts by using `far_call` or its variations `delegate_call` / `mimic_call` (mimic is reserved for
+system contracts).
+
+The size of a contract should be divisible by 32 bytes (4 instructions).
+
+### Storage of contracts
+
+Every account has a storage. Storage maps $2^{256}$ keys to values; both keys and values are 256-bit untagged words.
+
+Contracts may write to their own storage by using `sstore key, value` and read from storage using `sload key, dest`.
+
+### Static mode
+
+Static mode prevents contracts from modifying their storage and emitting events. In static mode, executing an
+instruction like `sstore` sends the VM into panic.
+
+To execute a contract C in static mode, use a `static` modifier: `far_call.static`. All contracts, called by C
+recursively, will also be executed in static mode. The VM exits static mode automatically when C terminates.
+
+### System contracts
+
+Part of Era’s functionality is implemented through system contracts. These contracts have addresses from 0 to $2^{16}$
+and are executed in kernel mode, where they have access to privileged instructions. An example of such an instruction is
+mimic call, a variant of far call where the caller can pretend to be another contract. This is useful for hiding the
+fact that something is implemented via a system contract, but in the hands of users it would mean being able to steal
+anyone’s tokens.
+
+System contracts implement contract deployment, extensions such as keccak256, decommitting code, etc.
+
+## Server and VM environment
+
+### Decommitter
+
+Decommitter is a module external to zkEVM that allows accessing deployed code by its hash.
+
+![Overview of VM modules](/images/zk-stack/vm-mapping.png)
+
+The system contract at address $2^{15}+2$, called Deployer, keeps hashes of the code of each contract in its storage.
+Far calls to a contract with address $C$ perform as follows:
+
+- VM internally accesses the storage of the `Deployer` contract by key $C$. This storage yields the hash value $H$.
+- Then VM queries the decommitter, providing $H$. Decommitter answers with the contract code.
+
+If the decommitter does not have the code for the requested hash, one of two things happens:
+
+- if C is a system contract (i.e. address of $C < 2^{16}$), the call will fail;
+- otherwise, VM will call the `DefaultAccount` contract.
+
+### Server
+
+The VM is controlled by a _server._ When the server needs to build a new batch, it starts an instance of zkEVM and feeds
+the transactions to the [Bootloader](#bootloader).
+
+zkEVM accepts three parameters:
+
+1. Bootloader’s hash. It is used to fetch the bootloader code from the decommitter.
+2. Code hash of the `DefaultAccount` contract code. It is used to fetch the default code from the decommitter in case of
+   a far call to a contract without any associated code.
+3. A boolean flag `is_porter_available`, to determine the number of shards (two if zkPorter is available, one
+   otherwise).
+
+zkEVM retrieves the code of the bootloader from the decommitter and proceeds with sequential execution of instructions
+on the bootloader’s code page.
+
+#### Failures and rollbacks
+
+There are three types of behaviour triggered by execution failures.
+
+1. Skipping a malformed transaction. It is a mechanism implemented by the server, external to zkEVM. The server makes a
+   snapshot of zkEVM state after completing every transaction. If the bootloader encounters a malformed transaction, it
+   fails, and the server restarts zkEVM from the most recent snapshot, skipping this transaction.
+
+   This behaviour is specific to the server/bootloader; the contract code has no way of invoking it.
+
+2. Revert is triggered by the contract code explicitly by executing `revert`. zkEVM saves its persistent state on every
+   near or far call. If the contract code identifies a recoverable error, it may execute `revert`; then zkEVM rolls the
+   storage and event queues back to the last checkpoint and executes the exception handler.
+3. Panic is triggered either explicitly by executing `panic` or internally when some execution invariants are violated,
+   e.g. an attempt to use a raw integer in the `ptr.add` instruction.
+
+   On panic, the persistent state of zkEVM is rolled back in the same way as on revert.
+
+### Bootloader
+
+Bootloader is a system contract in charge of block construction
+(**[sources](https://github.com/matter-labs/era-contracts/blob/main/system-contracts/bootloader/bootloader.yul)**).
+
+Formally, the bootloader is assigned the address BOOTLOADER_SYSTEM_CONTRACT_ADDRESS = $2^{15}+1$, but zkEVM decommits
+its code directly by its hash.
+
+The heap of the bootloader is special: it acts as an interface between the server and zkEVM. The server gradually fills
+the bootloader’s heap with transaction data, formatted according to an implementation-defined convention.
+ +The bootloader then acts roughly as the following code (not an actual implementation): + +```solidity +contract Bootloader { + function executeBlock(address operatorAddress, Transaction[2] memory transactions) { + for (uint256 i = 0; i < transactions.length; i++) { + validateTransaction(transactions[i]); + chargeFee(operatorAddress, transactions[i]); + executeTransaction(transactions[i]); + } + } + + function validateTransaction(Transaction memory tx) { + // validation logic + } + + function chargeFee(address operatorAddress, Transaction memory tx) { + // charge fee + } + + function executeTransaction(Transaction memory tx) { + // execution logic + } +} + +``` + +The bootloader is therefore responsible for: + +- validating transactions; +- executing transactions to form a new block; +- setting some of the transaction- or block-wide transaction parameters (e.g. `blockhash`, `tx.origin`). + +Server makes a snapshot of zkEVM state after completing every transaction. When the bootloader encounters a malformed +transaction, it fails, and the server restarts zkEVM from the most recent snapshot, skipping this transaction. If a +transaction is well-formed, zkEVM may still panic while handling it outside the bootloader code. This is a normal +situation and is handled by zkEVM in a regular way, through panics. + +The exact code of the bootloader is a part of a protocol; its hash is included in the block header. + +### Context value + +A part of the zkEVM state is a 128-bit _context value_. It implements `msg.value` standing for the amount of wei sent in +a transaction. In assembly, it is used as follows: + +1. Execute `context.set_context_u128 reg` to set the value; +2. Perform a far call — it captures the context value; +3. In a called contract, access the context value through `context.get_context_u128 reg`. + +Context value can not be set in static mode. 
diff --git a/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/20.formal-spec.md b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/20.formal-spec.md new file mode 100644 index 00000000..ea8528be --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/20.formal-spec.md @@ -0,0 +1,9 @@ +--- +title: VM Formal Specification +description: +--- + +This is the specification of the instruction set of EraVM 1.4.0, a language virtual machine for zkSync Era. +It describes the virtual machine's architecture, instruction syntax and semantics, and some elements of system protocol. + +[See the Formal Specification here.](https://matter-labs.github.io/eravm-spec/spec.html) diff --git a/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/_dir.yml b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/_dir.yml new file mode 100644 index 00000000..47392cc1 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/30.vm-specification/_dir.yml @@ -0,0 +1 @@ +title: Virtual Machine Specification diff --git a/content/10.zk-stack/10.components/50.zksync-evm/_dir.yml b/content/10.zk-stack/10.components/50.zksync-evm/_dir.yml new file mode 100644 index 00000000..179d2b70 --- /dev/null +++ b/content/10.zk-stack/10.components/50.zksync-evm/_dir.yml @@ -0,0 +1 @@ +title: zkSync EVM diff --git a/content/10.zk-stack/10.components/60.prover/10.index.md b/content/10.zk-stack/10.components/60.prover/10.index.md new file mode 100644 index 00000000..7a65a7e5 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/10.index.md @@ -0,0 +1,70 @@ +--- +title: Overview +description: Exploring the prover in zkSync, a ZK rollup technology, which ensures secure and efficient transaction verification through cryptographic proofs. +--- + +zkSync utilizes Zero-Knowledge (ZK) proofs to ensure secure and efficient transaction processing on the Ethereum blockchain. 
+This technology compresses transactions, significantly reducing fees for users while maintaining the robust security standards of Ethereum. + +### How zkSync Utilizes ZK Proofs + +Zero-Knowledge proofs enable a verifier to confirm that a prover has correctly executed a computation without revealing the specifics of the transaction. +This approach ensures both privacy and data security. +In zkSync, the prover is responsible for demonstrating the correct execution of zkSync’s Ethereum Virtual Machine (EVM), +and this proof is then verified by a smart contract on Ethereum. + +### The Proving Process + +The proving process in zkSync involves several crucial steps: + +1. **Witness Generation**: + This is the initial phase where, upon transaction initiation by a user, a witness is generated. + This witness acts as proof of the transaction's validity according to the network's consensus rules, without disclosing any transaction details. + Witnesses for new transactions are collected in batches and processed together. + +2. **Circuits**: + To generate accurate proofs, standard code logic must be converted into a format interpretable by the proof system. + This conversion involves organizing code into various circuits within a virtual machine, ensuring that every aspect of the code execution can be proven. + +3. **Proof System**: + The ZK circuit requires a robust proof system for processing. + In zkSync, this system is called Boojum. It comprises several components: + - **Boojum**: + This repository acts as a toolkit containing essential tools for proving and verifying circuit functionality, + along with backend components necessary for circuit construction. + - **zkevm_circuits**: + This repository is where the actual EVM-replicating circuits are built using tools from Boojum. 
+ - **zkevm_test_harness**: + Serving as the testing ground, this repository contains various tests to ensure the circuits function correctly + and includes code essential for running these circuits. + +### Understanding Circuits in zkSync + +A ZK circuit functions similarly to an arithmetic circuit, +where inputs at the bottom pass through various computational steps, +resulting in a final output at the top. +The prover's job is to demonstrate that each step of the circuit is computed correctly, satisfying the circuit with the correct outputs. + +It's crucial that each circuit step is fully constrained to prevent any incorrect computations by a malicious prover. +In Zero-Knowledge terminology, an underconstrained circuit may result in a soundness error, allowing invalid proofs to pass verification. + +![Arithmetic Circuit Diagram](/images/zk-stack/circuit.png){class="bg-white m-auto"} + +### What zkSync’s Circuits Prove + +The primary purpose of zkSync's circuits is to ensure the correct execution of the VM, covering every opcode, +storage interaction, and the integration of precompiled contracts. +These elements are crucial for the holistic functioning and security of the system. +This is described in more detail in +[Circuits](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits.md) + +### Additional Resources + +For those interested in a deeper dive into the technology behind zkSync's proof system, +resources like Vitalik Buterin's blog on [Plonk](https://vitalik.eth.limo/general/2019/09/22/plonk.html) +and the [Plonky2](https://github.com/mir-protocol/plonky2/blob/main/plonky2/plonky2.pdf) +paper provide extensive information on the arithmetization process. +More comprehensive details can also be found in the [Redshift Paper](https://eprint.iacr.org/2019/1400.pdf). 
+ +Through these sophisticated cryptographic processes, zkSync's prover efficiently secures and verifies transactions, +leveraging the power of ZK proofs to enhance blockchain scalability and security. diff --git a/content/10.zk-stack/10.components/60.prover/20.zk-terminology.md b/content/10.zk-stack/10.components/60.prover/20.zk-terminology.md new file mode 100644 index 00000000..999f37d2 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/20.zk-terminology.md @@ -0,0 +1,77 @@ +--- +title: ZK Terminology +description: This article provides definitions for key terms used in zkSync's zero-knowledge proof systems. +--- + +## Arithmetization + +Arithmetization is a technique in zero-knowledge proof systems that converts computations into polynomial equations +for efficient verification by a prover and a verifier. + +## Builder + +The builder sets up the constraint system, determining the placement and geometry of gates and providing essential information for system construction. + +## Circuit + +An arithmetic circuit is a cryptographic tool that encodes computational problems using gates, each performing operations like addition or multiplication. + +## Constraint + +A constraint is a rule that certain operations must follow to maintain validity and support proof generation in zkSync. + +## Constraint degree + +Constraint degree refers to the highest polynomial degree present in the gates of a constraint system, with zkSync allowing up to a degree of eight. + +## Constraint system + +A constraint system is a set of polynomial constraints representing the proofs to be verified. +It is satisfied when specific values assigned to its variables make all equations true. + +## Geometry + +In zkSync's PLONK arithmetization, geometry refers to the arrangement of witness data in a grid format across defined rows and columns. +Each row defines a gate (or a few gates), and the columns are as long as needed to hold all of the witness data. 
zkSync uses ~164 base witness columns. + +## Log + +A log in zkSync is similar to a database log, recording a list of changes within the system. + +## Lookup table + +A lookup table maps input values to outputs, streamlining the validation of computations and relationships in zero-knowledge proofs +by providing a quick reference. + +## Proof + +A proof can either indicate the entire proving process or specifically refer to the data that the prover sends to the verifier. + +## Prover + +In zkSync, a prover processes transactions by computing proofs that validate state transitions, which are then verified by a smart contract on Ethereum. + +## Satisfiable + +A circuit or constraint system is satisfiable if a provided witness meets all set conditions and constraints. + +## State Differentials + +State Diffs show the differences in account states before and after transaction processing, indicating changes like an increase in ETH balance. + +## Variables + +Variables serve as placeholders in a constraint system, holding space for witness data that will satisfy the defined constraints. + +## Verifier + +The Verifier, a smart contract on Ethereum, checks the validity of received proofs and updates the state root accordingly. + +## Witness + +A witness is the confidential input in a cryptographic circuit, representing the knowledge the prover wishes to demonstrate without full disclosure. + +## Worker + +A worker in zkSync is part of a multi-threaded proving system that allows for parallel execution of certain cryptographic computations, +such as polynomial addition. 
diff --git a/content/10.zk-stack/10.components/60.prover/30.running-the-prover.md b/content/10.zk-stack/10.components/60.prover/30.running-the-prover.md new file mode 100644 index 00000000..5b9bdc69 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/30.running-the-prover.md @@ -0,0 +1,23 @@ +--- +title: Running the Prover +--- + +Our ZK code is spread across three repositories: + +[Boojum](%%zk_git_repo_era-boojum%%/tree/main) contains the low level ZK details. + +[zkevm_circuits](%%zk_git_repo_era-zkevm_circuits%%/tree/main) contains the code for the circuits. + +[zkevm_test_harness](%%zk_git_repo_era-zkevm_test_harness%%/tree/v1.4.0) contains the tests for the +circuits. + +To get started, run the `basic_test` from the `era-zkevm_test_harness`: + +```bash +rustup default nightly-2023-08-23 +cargo update +cargo test basic_test --release -- --nocapture + +``` + +This test may take several minutes to run, but you will see lots of information along the way! diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/10.index.md b/content/10.zk-stack/10.components/60.prover/40.circuits/10.index.md new file mode 100644 index 00000000..e0b59108 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/10.index.md @@ -0,0 +1,124 @@ +--- +title: Overview +description: +--- + +The main circuit is called `MainVM`. It is the one where all the main logic happens. + +It consists of multiple cycles, where on each iteration we take a next opcode and try to execute it the following way: + +```rust +if opcode == Add { + // do addition +} +if opcode == SRead { + // do storage read +} +... +``` + +You may notice that `Add` instruction is much simpler than the `SRead` one. When you work with circuits you still need +to execute every opcode. 
+ +That’s why we can use the following approach: + +```rust +if opcode == Add { + // do addition +} +if opcode == SRead { + storage_queue.push((address, value)); + // proof storage read in other circuit +} +... +``` + +So instead of proving `SRead` we just push a proving request, that will be sent to another circuit, that will prove it. +That’s how we can make our prover structure more optimized and flexible. + +For now, we have 13 base layer circuits: + +- [MainVM](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Main%20Vm.md) +- [CodeDecommitmentsSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/SortDecommitments.md) +- [CodeDecommitter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/CodeDecommitter.md) +- [LogDemuxer](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/DemuxLogQueue.md) +- [KeccakRoundFunction](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/KeccakRoundFunction.md) +- [Sha256RoundFunction](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Sha256RoundFunction.md) +- [ECRecover](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Ecrecover.md) +- [RAMPermutation](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/RAMPermutation.md) +- [StorageSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/StorageSorter.md) +- [StorageApplication](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/StorageApplication.md) +- [EventsSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/LogSorter.md) +- [L1MessagesSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/LogSorter.md) +- 
[L1MessagesHasher](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/L1MessagesHasher.md) + +- + +They mostly communicate by queues (the diagram of communication is below). + +## Public Input Structure + +Public Input (PI) is some piece of data, that is revealed to the verifier. Usually, it consists of some inputs and +outputs. + +The main challenge for base layer circuits is the ability to prove unlimited amount of execution. For example, our +`MainVm` circuit can handle execution of $x$ opcodes. Then, if some transaction causes execution of more than $x$ +opcodes, we won’t be able to prove it. That’s why every circuit could be extended to multiple instances. So you can +always use $n$ `MainVm` instances to handle up to $nx$ opcode executions. + +All circuits have the following PI structure: + +![Diagram of Public Inputs for Circuits](/images/zk-stack/circuit-pi-diagram.png) + +| start flag | Boolean that shows if this is the first instance of corresponding circuit type | +| ------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| finished flag | Boolean that shows if this is the last instance of corresponding circuit type | +| Input | Structure that contains all inputs to this type of circuit (every instance of one circuit type has the same input) | +| FSM Input and FSM Output | The field has the same structure. It represents the inner state of circuit execution (the first fsm_input is empty, the second fsm_input equals the first fsm_output and so on…) | +| Output | Structure that contains all outputs of this type of circuit (the last instance contains the real output, the output field of the others is empty) | + +The code implementation can be found +[here](https://github.com/matter-labs/era-zkevm_circuits/blob/main/src/fsm_input_output/mod.rs#L32). 
+ +In terms of Arithmetization we don’t allocate all these fields like public input variables. A more efficient approach +would be computing commitment of type `[Num<F>; 4]` with poseidon2 and then allocating these 4 variables as public +inputs. + +![Diagram showing computing commitments for efficient arithmetization](/images/zk-stack/circuit-commitments.png) + +The equality of corresponding parts in different circuits is done during aggregating base layer circuits. Aggregating is +done by recursion level circuits that also verify base layer proofs. For now this is out of our scope, so we will focus +only on base layer. + +## Overall View of Base Layer Circuits + +![Diagram showing how all base layer circuits fit together](/images/zk-stack/base-layer-circuit-diagram.png) + +## Base Layer Circuits + +[Main Vm](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Main%20Vm.md) + +[SortDecommitments](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/SortDecommitments.md) + +[CodeDecommitter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/CodeDecommitter.md) + +[DemuxLogQueue](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/DemuxLogQueue.md) + +[KeccakRoundFunction](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/KeccakRoundFunction.md) + +[Sha256RoundFunction](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Sha256RoundFunction.md) + +[Ecrecover](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Ecrecover.md) + +[RAMPermutation](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/RAMPermutation.md) + +[StorageSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/StorageSorter.md) + 
+[StorageApplication](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/StorageApplication.md) + +[LogSorter](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/LogSorter.md) + +[L1MessagesHasher](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/L1MessagesHasher.md) + +There are a couple of circuits that do queue sorting. Here is the page that describes the algorithm: +[Sorting](https://github.com/code-423n4/2023-10-zksync/blob/main/docs/Circuits%20Section/Circuits/Sorting.md) diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/100.sha256-round-function.md b/content/10.zk-stack/10.components/60.prover/40.circuits/100.sha256-round-function.md new file mode 100644 index 00000000..01d4bbf2 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/100.sha256-round-function.md @@ -0,0 +1,375 @@ +--- +title: Sha256RoundFunction +description: +--- + +## Sha256RoundFunction PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L9) + +```rust +pub struct PrecompileFunctionInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub initial_memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/base_structures/precompile_input_outputs/mod.rs#L42) + +```rust +pub struct PrecompileFunctionOutputData<F: SmallField> { + pub final_memory_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/input.rs#L59) + +```rust +pub struct Sha256RoundFunctionFSMInputOutput<F: SmallField> { + pub internal_fsm: Sha256RoundFunctionFSM<F>, + pub log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub memory_queue_state: QueueState<F, 
FULL_SPONGE_QUEUE_STATE_WIDTH>, +} + +pub struct Sha256RoundFunctionFSM<F: SmallField> { + pub read_precompile_call: Boolean<F>, + pub read_words_for_round: Boolean<F>, + pub completed: Boolean<F>, + pub sha256_inner_state: [UInt32<F>; 8], + pub timestamp_to_use_for_read: UInt32<F>, + pub timestamp_to_use_for_write: UInt32<F>, + pub precompile_call_params: Sha256PrecompileCallParams<F>, +} +``` + +## Main circuit logic + +This is a precompile for the SHA256 hash function’s round function. + +We start from witness allocation: + +```rust +let Sha256RoundFunctionCircuitInstanceWitness { + closed_form_input, + requests_queue_witness, + memory_reads_witness, + } = witness; + +let mut structured_input = Sha256RoundFunctionCircuitInputOutput::alloc_ignoring_outputs( + cs, + closed_form_input.clone(), +); + +let start_flag = structured_input.start_flag; + +let requests_queue_state_from_input = structured_input.observable_input.initial_log_queue_state; +``` + +Check if `requests_queue_state_from_input` is trivial ( we didn't pop elements yet) and choose between input and `fsm` +queue state: + +```rust +requests_queue_state_from_input.enforce_trivial_head(cs); + +let requests_queue_state_from_fsm = structured_input.hidden_fsm_input.log_queue_state; + +let requests_queue_state = QueueState::conditionally_select( + cs, + start_flag, + &requests_queue_state_from_input, + &requests_queue_state_from_fsm, +); +``` + +the same procedure we do for `memory_queue`: + +```rust +let memory_queue_state_from_input = + structured_input.observable_input.initial_memory_queue_state; + +// it must be trivial +memory_queue_state_from_input.enforce_trivial_head(cs); + +let memory_queue_state_from_fsm = structured_input.hidden_fsm_input.memory_queue_state; + +let memory_queue_state = QueueState::conditionally_select( + cs, + start_flag, + &memory_queue_state_from_input, + &memory_queue_state_from_fsm, +); +``` + +Call `inner` part where is main logic: + +```rust +let final_state = 
sha256_precompile_inner::<F, CS, R>( + cs, + &mut memory_queue, + &mut requests_queue, + read_queries_allocator, + initial_state, + round_function, + limit, + ); +``` + +Form the final state (depending on flag we choose between states): + +```rust + + let done = final_state.completed; + structured_input.completion_flag = done; + structured_input.observable_output = PrecompileFunctionOutputData::placeholder(cs); + + structured_input.observable_output.final_memory_state = QueueState::conditionally_select( + cs, + structured_input.completion_flag, + &final_memory_state, + &structured_input.observable_output.final_memory_state, + ); + + structured_input.hidden_fsm_output.internal_fsm = final_state; + structured_input.hidden_fsm_output.log_queue_state = final_request_state; + structured_input.hidden_fsm_output.memory_queue_state = final_memory_state; +``` + +Finally, we compute a commitment to PublicInput and allocate it as witness variables. + +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); + let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); + for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); + } +``` + +### Inner part + +Start for set up different flags: `precompile_address`, `aux_byte_for_precompile`, and plugs: + +```rust +let precompile_address = UInt160::allocated_constant( + cs, + *zkevm_opcode_defs::system_params::SHA256_ROUND_FUNCTION_PRECOMPILE_FORMAL_ADDRESS, + ); +let aux_byte_for_precompile = UInt8::allocated_constant(cs, PRECOMPILE_AUX_BYTE); + +let boolean_false = Boolean::allocated_constant(cs, false); +let boolean_true = Boolean::allocated_constant(cs, true); +let zero_u32 = UInt32::zero(cs); +let zero_u256 = UInt256::zero(cs); +``` + +We can have a degenerate case when the queue is empty, but it's the first circuit in the queue, so we take default `FSM` +state that has 
`state.read_precompile_call = true`, we can only skip the full circuit if we are not in any form of +progress: + +```rust +let input_queue_is_empty = precompile_calls_queue.is_empty(cs); +let can_finish_immediately = + Boolean::multi_and(cs, &[state.read_precompile_call, input_queue_is_empty]); +``` + +Main work cycle: + +Check income data with constants(precompile addresses aux byte for precompile and must match): + +```rust +Num::conditionally_enforce_equal( + cs, + state.read_precompile_call, + &Num::from_variable(precompile_call.aux_byte.get_variable()), + &Num::from_variable(aux_byte_for_precompile.get_variable()), + ); +for (a, b) in precompile_call + .address + .inner + .iter() + .zip(precompile_address.inner.iter()) +{ + Num::conditionally_enforce_equal( + cs, + state.read_precompile_call, + &Num::from_variable(a.get_variable()), + &Num::from_variable(b.get_variable()), + ); +} +``` + +Create parameters that describe the call itself: + +```rust +let params_encoding = precompile_call.key; +let call_params = Sha256PrecompileCallParams::from_encoding(cs, params_encoding); + +state.precompile_call_params = Sha256PrecompileCallParams::conditionally_select( + cs, + state.read_precompile_call, + &call_params, + &state.precompile_call_params, +); +``` + +- `input_page` – memory page for `read_queue` +- `input_offset` – page index`read_queue` +- `output_page` – memory page for `write_queue` +- `output_offset` – page index`write_queue` +- `num_rounds` – number of rounds for hash function + +```rust +pub struct Sha256PrecompileCallParams<F: SmallField> { + pub input_page: UInt32<F>, + pub input_offset: UInt32<F>, + pub output_page: UInt32<F>, + pub output_offset: UInt32<F>, + pub num_rounds: UInt32<F>, +} +``` + +Setup `timestamp:` + +```rust +state.timestamp_to_use_for_read = UInt32::conditionally_select( + cs, + state.read_precompile_call, + &precompile_call.timestamp, + &state.timestamp_to_use_for_read, + ); + +// timestamps have large space, so this can be 
expected +let timestamp_to_use_for_write = + unsafe { state.timestamp_to_use_for_read.increment_unchecked(cs) }; +state.timestamp_to_use_for_write = UInt32::conditionally_select( + cs, + state.read_precompile_call, + ×tamp_to_use_for_write, + &state.timestamp_to_use_for_write, +); +``` + +Reset buffer if needed: + +```rust +let reset_buffer = Boolean::multi_or(cs, &[state.read_precompile_call, state.completed]); +state.read_words_for_round = Boolean::multi_or( + cs, + &[state.read_precompile_call, state.read_words_for_round], +); +state.read_precompile_call = boolean_false; +``` + +Now perform a few memory queries to read content: + +```rust +let zero_rounds_left = state.precompile_call_params.num_rounds.is_zero(cs); + +let mut memory_queries_as_u32_words = [zero_u32; 8 * MEMORY_READ_QUERIES_PER_CYCLE]; +let should_read = zero_rounds_left.negated(cs); +let mut bias_variable = should_read.get_variable(); +for dst in memory_queries_as_u32_words.array_chunks_mut::<8>() { + let read_query_value = + memory_read_witness.conditionally_allocate_biased(cs, should_read, bias_variable); + bias_variable = read_query_value.inner[0].get_variable(); + + let read_query = MemoryQuery { + timestamp: state.timestamp_to_use_for_read, + memory_page: state.precompile_call_params.input_page, + index: state.precompile_call_params.input_offset, + rw_flag: boolean_false, + is_ptr: boolean_false, + value: read_query_value, + }; + + let may_be_new_offset = unsafe { + state + .precompile_call_params + .input_offset + .increment_unchecked(cs) + }; + state.precompile_call_params.input_offset = UInt32::conditionally_select( + cs, + state.read_words_for_round, + &may_be_new_offset, + &state.precompile_call_params.input_offset, + ); + + // perform read + memory_queue.push(cs, read_query, should_read); +``` + +We need to change endianness. 
Memory is BE, and each of the 4-byte chunks should be interpreted as BE u32 for sha256: + +```rust +let be_bytes = read_query_value.to_be_bytes(cs); +for (dst, src) in dst.iter_mut().zip(be_bytes.array_chunks::<4>()) { + let as_u32 = UInt32::from_be_bytes(cs, *src); + *dst = as_u32; +} +``` + +get the initial state for `SHA256`: + +```rust +let sha256_empty_internal_state = sha256::ivs_as_uint32(cs); +let mut current_sha256_state = <[UInt32<F>; 8]>::conditionally_select( + cs, + reset_buffer, + &sha256_empty_internal_state, + &state.sha256_inner_state, + ); +``` + +finally, compute sha256 and write into memory if we completed all hash rounds. BTW `SHA256` algorithm you can read +[here](https://eips.ethereum.org/assets/eip-2680/sha256-384-512.pdf): + +```rust +let sha256_output = sha256::round_function::round_function_over_uint32( + cs, + &mut current_sha256_state, + &memory_queries_as_u32_words, +); +state.sha256_inner_state = current_sha256_state; + +let no_rounds_left = state.precompile_call_params.num_rounds.is_zero(cs); +let write_result = Boolean::multi_and(cs, &[state.read_words_for_round, no_rounds_left]); + +let mut write_word = zero_u256; +// some endianness magic +for (dst, src) in write_word + .inner + .iter_mut() + .rev() + .zip(sha256_output.array_chunks::<4>()) +{ + *dst = UInt32::from_le_bytes(cs, *src); +} + +let write_query = MemoryQuery { + timestamp: state.timestamp_to_use_for_write, + memory_page: state.precompile_call_params.output_page, + index: state.precompile_call_params.output_offset, + rw_flag: boolean_true, + is_ptr: boolean_false, + value: write_word, +}; +``` + +Update state: + +```rust +let input_is_empty = precompile_calls_queue.is_empty(cs); +let input_is_not_empty = input_is_empty.negated(cs); +let nothing_left = Boolean::multi_and(cs, &[write_result, input_is_empty]); +let process_next = Boolean::multi_and(cs, &[write_result, input_is_not_empty]); + +state.read_precompile_call = process_next; +state.completed = 
Boolean::multi_or(cs, &[nothing_left, state.completed]); +let t = Boolean::multi_or(cs, &[state.read_precompile_call, state.completed]); +state.read_words_for_round = t.negated(cs); +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/110.storage-application.md b/content/10.zk-stack/10.components/60.prover/40.circuits/110.storage-application.md new file mode 100644 index 00000000..f3465d4a --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/110.storage-application.md @@ -0,0 +1,222 @@ +--- +title: StorageApplication +description: +--- + +## StorageApplication PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/input.rs#L56) + +```rust +pub struct StorageApplicationInputData<F: SmallField> { + pub shard: UInt8<F>, + pub initial_root_hash: [UInt8<F>; 32], + pub initial_next_enumeration_counter: [UInt32<F>; 2], + pub storage_application_log_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/input.rs#L77) + +```rust +pub struct StorageApplicationOutputData<F: SmallField> { + pub new_root_hash: [UInt8<F>; 32], + pub new_next_enumeration_counter: [UInt32<F>; 2], + pub state_diffs_keccak256_hash: [UInt8<F>; 32], +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/input.rs#L29) + +```rust +pub struct StorageApplicationFSMInputOutput<F: SmallField> { + pub current_root_hash: [UInt8<F>; 32], + pub next_enumeration_counter: [UInt32<F>; 2], + pub current_storage_application_log_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub current_diffs_keccak_accumulator_state: + [[[UInt8<F>; keccak256::BYTES_PER_WORD]; keccak256::LANE_WIDTH]; keccak256::LANE_WIDTH], +} +``` + +## Main circuit logic + +This circuit takes storage requests from `storage_application_log_state`. 
Then for each query, it verifies the read
+value and updates the `root_hash` if needed. Also, it outputs the hash of storage diffs. The `shard_id` is enforced to be 0
+for now, because we have only one shard.
+
+### First part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/mod.rs#L281)
+
+The circuit begins with allocating input part of the PI.
+
+```rust
+let StorageApplicationCircuitInstanceWitness {
+    closed_form_input,
+    storage_queue_witness,
+    merkle_paths,
+    leaf_indexes_for_reads,
+} = witness;
+
+let mut structured_input =
+    StorageApplicationInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone());
+```
+
+We choose which `storage_application_log_state`, `root_hash` and other fields to continue to work with.
+
+```rust
+let mut current_root_hash = UInt8::<F>::parallel_select(
+    cs,
+    start_flag,
+    &structured_input.observable_input.initial_root_hash,
+    &structured_input.hidden_fsm_input.current_root_hash,
+);
+
+let storage_accesses_queue_state = QueueState::conditionally_select(
+    cs,
+    start_flag,
+    &storage_queue_state_from_input,
+    &storage_queue_state_from_fsm,
+);
+
+...
+```
+
+### Main part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/mod.rs#L393)
+
+Here’s the part where all the main logic is implemented. Firstly, we take a new storage request if needed.
+
+```rust
+let (storage_log, _) = storage_accesses_queue.pop_front(cs, parse_next_queue_elem);
+```
+
+Now we can parse it and do some checks.
+
+```rust
+let LogQuery {
+    address,
+    key,
+    read_value,
+    written_value,
+    rw_flag,
+    shard_id,
+    ..
+} = storage_log;
+```
+
+We need a merkle path for executing the query.
+
+```rust
+for _ in 0..STORAGE_DEPTH {
+    let wit = merkle_path_witness_allocator.conditionally_allocate_biased(
+        cs,
+        parse_next_queue_elem,
+        bias_variable,
+    );
+    bias_variable = wit.inner[0].get_variable();
+    new_merkle_path_witness.push(wit);
+}
+```
+
+Also, we update `state_diffs` data.
+
+```rust
+state_diff_data.address = UInt8::parallel_select(
+    cs,
+    parse_next_queue_elem,
+    &address_bytes,
+    &state_diff_data.address,
+);
+state_diff_data.key =
+    UInt8::parallel_select(cs, parse_next_queue_elem, &key_bytes, &state_diff_data.key);
+state_diff_data.derived_key = UInt8::parallel_select(
+    cs,
+    parse_next_queue_elem,
+    &derived_key,
+    &state_diff_data.derived_key,
+);
+...
+```
+
+Finally, we compute a new merkle path.
+
+```rust
+let mut current_hash = blake2s(cs, &leaf_bytes);
+
+for (path_bit, path_witness) in path_selectors
+    .into_iter()
+    .zip(merkle_path_witness.into_iter())
+{
+    let left = UInt8::parallel_select(cs, path_bit, &path_witness, &current_hash);
+    let right = UInt8::parallel_select(cs, path_bit, &current_hash, &path_witness);
+    let mut input = [zero_u8; 64];
+    input[0..32].copy_from_slice(&left);
+    input[32..64].copy_from_slice(&right);
+
+    current_hash = blake2s(cs, &input);
+}
+```
+
+If it was a write request, then we update the `root_hash`. Otherwise, we enforce that it’s still the same.
+
+```rust
+current_root_hash = UInt8::parallel_select(
+    cs,
+    write_stage_in_progress,
+    &current_hash,
+    &current_root_hash,
+);
+
+for (a, b) in current_root_hash.iter().zip(current_hash.iter()) {
+    Num::conditionally_enforce_equal(
+        cs,
+        should_compare_roots,
+        &Num::from_variable(a.get_variable()),
+        &Num::from_variable(b.get_variable()),
+    );
+}
+```
+
+In the end, we update `state_diffs` state.
+
+```rust
+for block in
+    extended_state_diff_encoding.array_chunks::<{ keccak256::KECCAK_RATE_BYTES }>()
+{
+    keccak256_conditionally_absorb_and_run_permutation(
+        cs,
+        write_stage_in_progress,
+        &mut diffs_keccak_accumulator_state,
+        block,
+    );
+}
+```
+
+### Final part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_application/mod.rs#L643)
+
+We need to run padding and one more permutation for final output.
+ +```rust +keccak256_conditionally_absorb_and_run_permutation( + cs, + boolean_true, + &mut diffs_keccak_accumulator_state, + &padding_block, +); +``` + +Now we update PI output parts and compute a commitment. Then we allocate it as public variables. + +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/10.index.md b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/10.index.md new file mode 100644 index 00000000..d073b9c4 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/10.index.md @@ -0,0 +1,62 @@ +--- +title: Overview +description: +--- + +We have four circuits, that receive some queue of elements and do sorting and deduplicating: + +- [SortDecommitments](/zk-stack/components/prover/circuits/sorting/sort-decommitments) +- [StorageSorter](/zk-stack/components/prover/circuits/sorting/storage-sorter), +- [LogSorter](/zk-stack/components/prover/circuits/sorting/log-sorter) - used by EventsSorter and L1MessageSorter. + +The main scenario is the following: we have an `input_queue` of elements, that + +1. could be compared between each other, + +2. could be represented (encoded) as `[Num<F>; N]`. + +Then we create `sorted_queue`, that contains all the elements in sorted order. + +And we create an empty `result_queue` to store the results. + +In the end, we can compute `challenges` that is `[Num<F>, N+1]` from states of `input_queue` and `sorted_queue`. 
+ +Then the algorithm is the following: + +```rust +let mut lhs = 1; +let mut rhs = 1; + +assert!(input_queue.len() == sorted_queue.len()); +let previous_element = input_queue.pop(); +let previous_sorted_element = sorted_queue.pop(); +loop { + previous_encoding: [Num<F>; N] = previous_element.to_encoding(); + previous_sorted_encoding: [Num<F>; N] = previous_sorted_element.to_encoding(); + + lhs *= previous_encoding[0] * challenges[0] + + previous_encoding[1] * challenges[1] + + ... + + challenges[N]; + + rhs *= previous_sorted_encoding[0] * challenges[0] + + previous_sorted_encoding[1] * challenges[1] + + ... + + challenges[N]; + + if input_queue.is_empty() || sorted_queue.is_empty() { + break; + } + + let next_element = input_queue.pop(); + let next_sorted_element = sorted_queue.pop(); + + assert!(next_sorted_element >= previous_sorted_element); + + previous_element = next_element; + previous_sorted_element = next_sorted_element; +} +assert!(lhs == rhs); +``` + +You can read more about permutation argument [here](https://triton-vm.org/spec/permutation-argument.html). 
diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/20.sort-decommitments.md b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/20.sort-decommitments.md new file mode 100644 index 00000000..4c2babd8 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/20.sort-decommitments.md @@ -0,0 +1,235 @@ +--- +title: SortDecommitments +description: +--- + +## SortDecommitments PI + +[Input](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_Decommitment_requests/input.rs#L62) + +```rust +pub struct CodeDecommitmentsDeduplicatorInputData<F: SmallField> { + pub initial_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub sorted_queue_initial_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +[Output](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_Decommitment_requests/input.rs#L81) + +```rust +pub struct CodeDecommittmentsDeduplicatorOutputData<F: SmallField> { + pub final_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +[FSM Input and FSM Output](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_decommittment_requests/input.rs#L26) + +```rust +pub struct CodeDecommittmentsDeduplicatorFSMInputOutput<F: SmallField> { + pub initial_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub sorted_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub final_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + + pub lhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub rhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + + pub previous_packed_key: [UInt32<F>; PACKED_KEY_LENGTH], + pub first_encountered_timestamp: UInt32<F>, + pub previous_record: DecommitQuery<F>, +} +``` + +## Main circuit logic + +This circuit handles the sorting and deduplication of code cancellation requests. Before starting, during the pre-start +phase, the first decommitter queue is generated. 
To decommitter a code, the input will receive the hash root of the code, +the length of the code, the code hash of the opcode, the number of opcodes and the code of the page. Next, it sorts the +queue and, in the process, identifies and removes identical requests, serving as a filtering mechanism in case the same +contract is called several times. + +The detailed explanation of sorting and deduplicating can be found +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). + +[First part](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_decommittment_requests/mod.rs#L51) + +The circuit begins with allocating input part of the PI. + +```rust +let CodeDecommitmentsDeduplicatorInstanceWitness { + closed_form_input, + initial_queue_witness, + sorted_queue_witness, +} = witness; + +let mut structured_input = CodeDecommitmentsDeduplicatorInputOutput::alloc_ignoring_outputs( + cs, + closed_form_input.clone(), +); +``` + +In this part, we should decide what `initial_queue_state` to use (the one from `Input` or the other one from +`FSM Input`). We do the same for sorted queue. + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &initial_queue_from_passthrough_state, + &initial_log_queue_state_from_fsm_state, +); +``` + +Also, we decide to create a new result queue or use one from the previous circuit. + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &empty_state, + &final_sorted_queue_from_fsm_state, +); +``` + +Now we need to generate challenges for permutation argument. 
+ +```rust +let challenges = crate::utils::produce_fs_challenges::< + F, + CS, + R, + FULL_SPONGE_QUEUE_STATE_WIDTH, + { DECOMMIT_QUERY_PACKED_WIDTH + 1 }, + DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS, +>( + cs, + structured_input.observable_input.initial_queue_state.tail, + structured_input + .observable_input + .sorted_queue_initial_state + .tail, + round_function, +); +``` + +And decide whether we generate new accumulators for permutation argument or use existing ones. + +```rust +let initial_lhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.lhs_accumulator, +); + +let initial_rhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.rhs_accumulator, +); +``` + +Also, we make other parts of FSM state based on `start_flag`. + +```rust +let mut previous_record = DecommitQuery::conditionally_select( + cs, + structured_input.start_flag, + &trivial_record, + &structured_input.hidden_fsm_input.previous_record, +); + +let mut previous_packed_key = <[UInt32<F>; PACKED_KEY_LENGTH]>::conditionally_select( + cs, + structured_input.start_flag, + &[zero_u32; PACKED_KEY_LENGTH], + &structured_input.hidden_fsm_input.previous_packed_key, +); + +let mut first_encountered_timestamp = UInt32::conditionally_select( + cs, + structured_input.start_flag, + &zero_u32, + &structured_input + .hidden_fsm_input + .first_encountered_timestamp, +); +``` + +[Main part](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_Decommitment_requests/mod.rs#L234) + +Here we implement the main logic of the circuit. We run a cycle where on each iteration we try to pop a new element. 
+ +```rust +let (_, original_encoding) = original_queue.pop_front(cs, should_pop); +let (sorted_item, sorted_encoding) = sorted_queue.pop_front(cs, should_pop); +``` + +We compute contribution to permutation argument accumulators. + +```rust +for ((challenges, lhs), rhs) in fs_challenges.iter().zip(lhs.iter_mut()).zip(rhs.iter_mut()) +{ + ... +} +``` + +After, we enforce that elements from sorted queue are actually sorted. + +```rust +new_key_is_greater.conditionally_enforce_true(cs, should_pop); +``` + +Also, we need to deduplicate some decommit requests if there are the same ones. + +```rust +// decide if we should add the PREVIOUS into the queue +let add_to_the_queue = Boolean::multi_and(cs, &[previous_is_non_trivial, different_hash]); + +result_queue.push(cs, record_to_add, add_to_the_queue); +``` + +Now we update inner variables. + +```rust +previous_item_is_trivial = is_trivial; +// may be update the timestamp +*first_encountered_timestamp = UInt32::conditionally_select( + cs, + same_hash, + &first_encountered_timestamp, + &sorted_item.timestamp, +); +*previous_record = sorted_item; +*previous_packed_key = packed_key; +``` + +In the end, if the queues are empty, and we have taken the last element, we push it immediately. + +```rust +let add_to_the_queue = Boolean::multi_and(cs, &[previous_is_non_trivial, completed]); + +result_queue.push(cs, record_to_add, add_to_the_queue); +``` + +[Final part](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/sort_Decommitment_requests/mod.rs#L191C1-L191C1) + +We check that permutation accumulators are equal, if the queues are already empty. + +```rust +for (lhs, rhs) in new_lhs.iter().zip(new_rhs.iter()) { + Num::conditionally_enforce_equal(cs, completed, lhs, rhs); +} +``` + +Now we update PI output parts and compute a commitment. Then we allocate it as public variables. 
+ +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/30.storage-sorter.md b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/30.storage-sorter.md new file mode 100644 index 00000000..3be7f648 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/30.storage-sorter.md @@ -0,0 +1,292 @@ +--- +title: StorageSorter +--- + +## StorageSorter PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/input.rs#L84C57-L84C57) + +```rust +pub struct StorageDeduplicatorInputData<F: SmallField> { + pub shard_id_to_process: UInt8<F>, + pub unsorted_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/input.rs#L103) + +```rust +pub struct StorageDeduplicatorOutputData<F: SmallField> { + pub final_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/input.rs#L37) + +```rust +pub struct StorageDeduplicatorFSMInputOutput<F: SmallField> { + pub lhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub rhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub current_unsorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub current_intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub current_final_sorted_queue_state: QueueState<F, 
QUEUE_STATE_WIDTH>, + pub cycle_idx: UInt32<F>, + pub previous_packed_key: [UInt32<F>; PACKED_KEY_LENGTH], + pub previous_key: UInt256<F>, + pub previous_address: UInt160<F>, + pub previous_timestamp: UInt32<F>, + pub this_cell_has_explicit_read_and_rollback_depth_zero: Boolean<F>, + pub this_cell_base_value: UInt256<F>, + pub this_cell_current_value: UInt256<F>, + pub this_cell_current_depth: UInt32<F>, +} +``` + +--- +## Main circuit logic + +The main logic of this circuit is sorting and deduplicating storage requests from `unsorted_log_queue_state`. The result +storage requests are pushed to `final_sorted_queue_state`. + +### First part + +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/mod.rs#L177) + +We start, as usually, with allocating input fields from PI. + +```rust +let mut structured_input = StorageDeduplicatorInputOutput::alloc_ignoring_outputs( + cs, + structured_input_witness.clone(), +); +``` + +In this part, we should decide what `unsorted_queue_state` to use (the one from `Input` or the other one from +`FSM Input`). We do the same for sorted queue. + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &unsorted_queue_from_passthrough_state, + &unsorted_queue_from_fsm_input_state, +); + +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &intermediate_sorted_queue_from_passthrough.into_state(), + &intermediate_sorted_queue_from_fsm_input.into_state(), +); +``` + +Also, we decide to create a new queue for the output, or continue working with the existing one. + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &empty_final_sorted_queue.into_state(), + &final_sorted_queue_from_fsm_input.into_state(), +); +``` + +Now we need to generate challenges for permutation argument. 
+ +```rust +let challenges = crate::utils::produce_fs_challenges::< + F, + CS, + R, + QUEUE_STATE_WIDTH, + { TIMESTAMPED_STORAGE_LOG_ENCODING_LEN + 1 }, + DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS, +>( + cs, + structured_input + .observable_input + .unsorted_log_queue_state + .tail, + structured_input + .observable_input + .intermediate_sorted_queue_state + .tail, + round_function, +); +``` + +And decide whether we generate new accumulators for permutation argument or use existing ones. + +```rust +let initial_lhs = + <[Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS]>::conditionally_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.lhs_accumulator, + ); + +let initial_rhs = + <[Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS]>::conditionally_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.rhs_accumulator, + ); +``` + +### Main part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/mod.rs#L558) + +Here we implement the main logic of the circuit. We run a cycle where on each iteration we try to pop a new element. + +```rust +let (_, original_encoding) = original_queue.pop_front(cs, should_pop); +let (sorted_item, sorted_encoding) = intermediate_sorted_queue.pop_front(cs, should_pop); +``` + +Then we accumulate encodings for permutation argument. You can read more about it +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). 
+ +```rust +for (((lhs_dst, rhs_dst), challenges), additive_part) in lhs + .iter_mut() + .zip(rhs.iter_mut()) + .zip(fs_challenges.iter()) + .zip(additive_parts.iter()) +{ + lhs_lc.clear(); + rhs_lc.clear(); + + for ((original_el, sorted_el), challenge) in extended_original_encoding + .iter() + .zip(sorted_encoding.iter()) + .zip(challenges.iter()) + { + let lhs_contribution = original_el.mul(cs, &challenge); + let rhs_contribution = sorted_el.mul(cs, &challenge); + + lhs_lc.push((lhs_contribution.get_variable(), F::ONE)); + rhs_lc.push((rhs_contribution.get_variable(), F::ONE)); + } + + lhs_lc.push((additive_part.get_variable(), F::ONE)); + rhs_lc.push((additive_part.get_variable(), F::ONE)); + + let lhs_lc = Num::linear_combination(cs, &lhs_lc); + let rhs_lc = Num::linear_combination(cs, &rhs_lc); + + let lhs_candidate = lhs_dst.mul(cs, &lhs_lc); + let rhs_candidate = rhs_dst.mul(cs, &rhs_lc); + + *lhs_dst = Num::conditionally_select(cs, should_pop, &lhs_candidate, &*lhs_dst); + *rhs_dst = Num::conditionally_select(cs, should_pop, &rhs_candidate, &*rhs_dst); +} +``` + +Now we enforce sorting. + +```rust +previous_key_is_greater.conditionally_enforce_false(cs, not_item_is_trivial); +``` + +Maybe we should push the old query if the new key is different. So we push if at least one of these conditions holds: + +- there was a read at depth 0; +- the sell is changes; +- write that was declined, but not by a rollback. + +```rust +let query = LogQuery { + address: previous_address, + key: previous_key, + read_value: this_cell_base_value, + written_value: this_cell_current_value, + rw_flag: should_write, + aux_byte: UInt8::zero(cs), + rollback: Boolean::allocated_constant(cs, false), + is_service: Boolean::allocated_constant(cs, false), + shard_id: shard_id_to_process, + tx_number_in_block: UInt32::zero(cs), + timestamp: UInt32::zero(cs), +}; + +sorted_queue.push(cs, query, should_push); +``` + +After that, we update some inner variables. 
+ +```rust +let meaningful_value = UInt256::conditionally_select( + cs, + record.rw_flag, + &record.written_value, + &record.read_value, +); + +this_cell_base_value = UInt256::conditionally_select( + cs, + new_non_trivial_cell, + &record.read_value, + &this_cell_base_value, +); + +... +``` + +Now we continue working with current query. We check that the read field is correct. + +```rust +let read_is_equal_to_current = + UInt256::equals(cs, &this_cell_current_value, &record.read_value); +read_is_equal_to_current.conditionally_enforce_true(cs, check_read_consistency); +``` + +After that, we do some other variable updates. + +After the main cycle, we do one more iteration if we took the last query from the queue during the last cycle. + +```rust +let query = LogQuery { + address: previous_address, + key: previous_key, + read_value: this_cell_base_value, + written_value: this_cell_current_value, + rw_flag: should_write, + aux_byte: UInt8::zero(cs), + rollback: Boolean::allocated_constant(cs, false), + is_service: Boolean::allocated_constant(cs, false), + shard_id: shard_id_to_process, + tx_number_in_block: UInt32::zero(cs), + timestamp: UInt32::zero(cs), +}; + +sorted_queue.push(cs, query, should_push); +``` + +### Final part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/storage_validity_by_grand_product/mod.rs#L424) + +If the queues are empty, we check the permutation argument accumulators equality. + +```rust +let completed = unsorted_is_empty.and(cs, sorted_is_empty); +new_lhs.iter().zip(new_rhs).for_each(|(l, r)| { + Num::conditionally_enforce_equal(cs, completed, &l, &r); +}); +``` + +Now we update PI output parts and compute a commitment. Then we allocate it as public variables. 
+ +```rust +let input_commitment = + commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/40.log-sorter.md b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/40.log-sorter.md new file mode 100644 index 00000000..974c3dbb --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/40.log-sorter.md @@ -0,0 +1,354 @@ +--- +title: LogSorter +description: +--- + +`LogSorter` is one circuit that is used as both `EventsSorter` and `L1MessagesSorter`. + +## LogSorter PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L57) + +```rust +pub struct EventsDeduplicatorInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L74) + +```rust +pub struct EventsDeduplicatorOutputData<F: SmallField> { + pub final_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L28) + +```rust +pub struct EventsDeduplicatorFSMInputOutput<F: SmallField> { + pub lhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub rhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub initial_unsorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub final_result_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub previous_key: UInt32<F>, + pub previous_item: LogQuery<F>, +} +``` + +--- +## Main circuit logic + +The main logic of this circuit is sorting and 
deduplicating logs from `initial_log_queue_state`. The result is pushed to `final_queue_state`. + +With sorting, we get 2 queues – a simple one, and a sorted one. + +We start with the witness allocation: + +```rust +let mut structured_input = + EventsDeduplicatorInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone()); +``` + +Now the scheme is familiar. + +Check if we didn't take elements from the queue: + +```rust +unsorted_queue_from_passthrough_state.enforce_trivial_head(cs); +``` + +Judging by the flag, we choose a queue: + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &unsorted_queue_from_passthrough_state, + &unsorted_queue_from_fsm_input_state, + ); +``` + +Wrap the state and witnesses for it in `StorageLogQueue`, thereby preparing the input data for `inner`: + +```rust +let mut unsorted_queue = StorageLogQueue::<F, R>::from_state(cs, state); + + use std::sync::Arc; + let initial_queue_witness = CircuitQueueWitness::from_inner_witness(initial_queue_witness); + unsorted_queue.witness = Arc::new(initial_queue_witness); + + let intermediate_sorted_queue_from_passthrough_state = structured_input + .observable_input + .intermediate_sorted_queue_state; +``` + +For `sorted_queue`, it is the same procedure. + +We generate challenges and accumulators for the permutation argument. A detailed explanation can be found [here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). 
+ +```rust +let challenges = crate::utils::produce_fs_challenges::< + F, + CS, + R, + QUEUE_STATE_WIDTH, + { MEMORY_QUERY_PACKED_WIDTH + 1 }, + DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS, + >( + cs, + structured_input + .observable_input + .initial_log_queue_state + .tail, + structured_input + .observable_input + .intermediate_sorted_queue_state + .tail, + round_function, + ); +``` + +Again, if it is not the rest cycle (`start_flag == false`), we should choose fsm: + +```rust +let one = Num::allocated_constant(cs, F::ONE); +let initial_lhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.lhs_accumulator, +); + +let initial_rhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.rhs_accumulator, +); +``` + +Depending on the flag, we prepare all the information for `inner` part: + +```rust +let zero_u32 = UInt32::zero(cs); +let previous_key = UInt32::conditionally_select( + cs, + structured_input.start_flag, + &zero_u32, + &structured_input.hidden_fsm_input.previous_key, +); +``` + +```rust +let empty_storage = LogQuery::placeholder(cs); +let previous_item = LogQuery::conditionally_select( + cs, + structured_input.start_flag, + &empty_storage, + &structured_input.hidden_fsm_input.previous_item, +); +``` + +After `inner` part we check `unsorted_queue` and `intermediate_sorted_queue`.: + +```rust +let unsorted_is_empty = unsorted_queue.is_empty(cs); +let sorted_is_empty = intermediate_sorted_queue.is_empty(cs); + +Boolean::enforce_equal(cs, &unsorted_is_empty, &sorted_is_empty); +``` + +We check that permutation accumulators are equal and if the queues are already empty: + +```rust +let completed = unsorted_queue.length.is_zero(cs); + for (lhs, rhs) in new_lhs.iter().zip(new_rhs.iter()) { + Num::conditionally_enforce_equal(cs, completed, lhs, rhs); + } +``` + 
+Finally, we compute a commitment to PublicInput and allocate it as witness variables. + +```rust +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` + +### Inner part + +Note: we have specific logic for rollback. +When a function emits an event and then that function is rolled back, we should cancel this event. +Inside the VM, we create exactly the same event: same key, block number, timestamp, etc. The only change is that the rollback flag is now true. +In the inner part, we first sort the logs, look for these pairs, and cancel them out. + +There are two cases: either `unsorted_queue` is empty, in which case there is no work to do, or it is not empty, and we continue with the non-trivial logic. + +```rust +let no_work = unsorted_queue.is_empty(cs); +let mut previous_is_trivial = Boolean::multi_or(cs, &[no_work, is_start]); +``` + +Additional checks for length. We should always check whether the sorted queue and the normal queue are of the same length. + +```rust +let unsorted_queue_length = Num::from_variable(unsorted_queue.length.get_variable()); +let intermediate_sorted_queue_length = + Num::from_variable(intermediate_sorted_queue.length.get_variable()); + +Num::enforce_equal( + cs, + &unsorted_queue_length, + &intermediate_sorted_queue_length, +); +``` + +We can pop elements only if `unsorted_queue` is not empty. +That’s why every time we set up the flags `original_is_empty`, `sorted_is_empty`. +We also ensure that items are "write" unless it's a padding. 
+ +```rust +let original_is_empty = unsorted_queue.is_empty(cs); +let sorted_is_empty = intermediate_sorted_queue.is_empty(cs); +Boolean::enforce_equal(cs, &original_is_empty, &sorted_is_empty); + +let should_pop = original_is_empty.negated(cs); +let is_trivial = original_is_empty; + +let (_, original_encoding) = unsorted_queue.pop_front(cs, should_pop); +let (sorted_item, sorted_encoding) = intermediate_sorted_queue.pop_front(cs, should_pop); +``` + +The next block of code is sorting. You can find the main idea [here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). + +Check if keys are equal and check a value. +We compare timestamps and then resolve logic over rollbacks, so the only way when keys are equal can be when we do a rollback. +Ensure sorting for uniqueness timestamp and rollback flag. +We know that timestamps are unique across logs, and are also the same between write and rollback. +Keys are always ordered no matter what, and are never equal unless it's padding: + +```rust +let sorting_key = sorted_item.timestamp; +let (keys_are_equal, new_key_is_smaller) = + unpacked_long_comparison(cs, &[previous_key], &[sorting_key]); +new_key_is_smaller.conditionally_enforce_false(cs, should_pop); +``` + +There are only two cases when keys are equal: + +- it's a padding element +- it's a rollback + +It's enough to compare timestamps, as the VM circuit guarantees uniqueness if it's not a padding. 
Now ensure sorting: + +```rust +let previous_is_not_rollback = previous_item.rollback.negated(cs); +let enforce_sequential_rollback = Boolean::multi_and( + cs, + &[previous_is_not_rollback, sorted_item.rollback, should_pop], +); +keys_are_equal.conditionally_enforce_true(cs, enforce_sequential_rollback); + +let same_log = UInt32::equals(cs, &sorted_item.timestamp, &previous_item.timestamp); + +let values_are_equal = + UInt256::equals(cs, &sorted_item.written_value, &previous_item.written_value); + +let negate_previous_is_trivial = previous_is_trivial.negated(cs); +let should_enforce = Boolean::multi_and(cs, &[same_log, negate_previous_is_trivial]); + +values_are_equal.conditionally_enforce_true(cs, should_enforce); + +let this_item_is_non_trivial_rollback = + Boolean::multi_and(cs, &[sorted_item.rollback, should_pop]); +let negate_previous_item_rollback = previous_item.rollback.negated(cs); +let previous_item_is_non_trivial_write = Boolean::multi_and( + cs, + &[negate_previous_item_rollback, negate_previous_is_trivial], +); +let is_sequential_rollback = Boolean::multi_and( + cs, + &[ + this_item_is_non_trivial_rollback, + previous_item_is_non_trivial_write, + ], +); +same_log.conditionally_enforce_true(cs, is_sequential_rollback); +``` + +Decide if we should add the previous into the queue. 
We add only if the previous one is not trivial, it had a different key, and it wasn't rolled back: + +```rust +let negate_same_log = same_log.and(cs, should_pop).negated(cs); +let add_to_the_queue = Boolean::multi_and( + cs, + &[ + negate_previous_is_trivial, + negate_same_log, + negate_previous_item_rollback, + ], +); +``` + +Further, we don't need in our `LogQueue` some fields, so we just clean up: + +```rust +let boolean_false = Boolean::allocated_constant(cs, false); +let query_to_add = LogQuery { + address: previous_item.address, + key: previous_item.key, + read_value: UInt256::zero(cs), + written_value: previous_item.written_value, + rw_flag: boolean_false, + aux_byte: UInt8::zero(cs), + rollback: boolean_false, + is_service: previous_item.is_service, + shard_id: previous_item.shard_id, + tx_number_in_block: previous_item.tx_number_in_block, + timestamp: UInt32::zero(cs), +}; +``` + +Finalization step - same way, check if the last item is not a rollback: + +```rust +let now_empty = unsorted_queue.is_empty(cs); + +let negate_previous_is_trivial = previous_is_trivial.negated(cs); +let negate_previous_item_rollback = previous_item.rollback.negated(cs); +let add_to_the_queue = Boolean::multi_and( + cs, + &[ + negate_previous_is_trivial, + negate_previous_item_rollback, + now_empty, + ], +); +let boolean_false = Boolean::allocated_constant(cs, false); +let query_to_add = LogQuery { + address: previous_item.address, + key: previous_item.key, + read_value: UInt256::zero(cs), + written_value: previous_item.written_value, + rw_flag: boolean_false, + aux_byte: UInt8::zero(cs), + rollback: boolean_false, + is_service: previous_item.is_service, + shard_id: previous_item.shard_id, + tx_number_in_block: previous_item.tx_number_in_block, + timestamp: UInt32::zero(cs), +}; + +result_queue.push(cs, query_to_add, add_to_the_queue); + +unsorted_queue.enforce_consistency(cs); +intermediate_sorted_queue.enforce_consistency(cs); +``` diff --git 
a/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/_dir.yml b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/_dir.yml new file mode 100644 index 00000000..51af68c7 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/120.sorting/_dir.yml @@ -0,0 +1 @@ +title: Sorting and Deduplicating diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/15.circuit-testing.md b/content/10.zk-stack/10.components/60.prover/40.circuits/15.circuit-testing.md new file mode 100644 index 00000000..7567f51a --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/15.circuit-testing.md @@ -0,0 +1,64 @@ +--- +title: Circuit Testing +description: +--- + +<!-- TODO: swap out images for code samples --> + +This page explains unit tests for circuits. Specifically, it goes through a unit test of +[ecrecover](https://github.com/matter-labs/era-zkevm_circuits/blob/main/src/ecrecover/mod.rs#L796). The tests for other +circuits are very similar. + +Many of the tests for different circuits are nearly identical, for example: + +- test_signature_for_address_verification (ecrecover) +- test_code_unpacker_inner +- test_demultiplex_storage_logs_inner +- and several others. + +If you understand one, you will quickly be able to understand them all. + +Let’s focus on ecrecover. Ecrecover is a precompile that, given your signature, can compute your address. If our circuit +works correctly, we should be able to recover the proper address, and be able to prove the computation was done +correctly. + +![ECRecover testing](/images/zk-stack/circuit-ecrecover.png) + +The test begins by defining `geometry`, `max_variables`, and `max_trace_len`. This data will be used to create the +constraint system. Next, we define a helper function: + +![ECRecover geometry](/images/zk-stack/circuits-ecrecover-geometry.png) + +To help run the test, we have a helper function called `configure` that returns a builder. 
The builder knows all of the +gates and gate placement strategy, which will be useful for setting up the constraint system. + +![Code block showing usage of `configure`](/images/zk-stack/circuits-configure-builder.png) + +The constraint system is almost ready! We still need to add the lookup tables for common boolean functions: + +![Code block showing creation of lookup tables](/images/zk-stack/circuit-lookup.png) + +Now the constraint system is ready! We can start the main part of the test! + +![Code block showing signature simulation](/images/zk-stack/circuits-address.png) + +Here we have hard coded a secret key with its associated public key, and generate a signature. We will test our circuit +on these inputs! Next we “allocate” these inputs as witnesses: + +![Code block showing witness allocation](/images/zk-stack/circuit-allocate.png) + +We have to use special integer types because we are working in a finite field. + +![Code block showing integer types](/images/zk-stack/circuit-finite-fields.png) + +The constants here are specific to the curve used, and are described in detail by code comments in the +ecrecover_precompile_inner_routine. + +Finally we can call the ecrecover_precompile_inner_routine: + +![Code block showing ecrecover precompile](/images/zk-stack/circuit-ecrecover-precompile.png) + +Lastly, we need to check to make sure that 1) we recovered the correct address, and 2) the constraint system can be +satisfied, meaning the proof works. 
+ +![Code block comparing recovered address with original](/images/zk-stack/circuit-compare-addresses.png) diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/20.code-decommitter.md b/content/10.zk-stack/10.components/60.prover/40.circuits/20.code-decommitter.md new file mode 100644 index 00000000..dec75715 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/20.code-decommitter.md @@ -0,0 +1,217 @@ +--- +title: CodeDecommitter +description: +--- + +## CodeDecommitter PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/input.rs#L80) + +```rust +pub struct CodeDecommitterInputData<F: SmallField> { + pub memory_queue_initial_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub sorted_requests_queue_initial_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/input.rs#L100) + +```rust +pub struct CodeDecommitterOutputData<F: SmallField> { + pub memory_queue_final_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/input.rs#L61) + +```rust +pub struct CodeDecommitterFSMInputOutput<F: SmallField> { + pub internal_fsm: CodeDecommittmentFSM<F>, + pub decommittment_requests_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} + +pub struct CodeDecommittmentFSM<F: SmallField> { + pub sha256_inner_state: [UInt32<F>; 8], // 8 uint32 words of internal sha256 state + pub hash_to_compare_against: UInt256<F>, + pub current_index: UInt32<F>, + pub current_page: UInt32<F>, + pub timestamp: UInt32<F>, + pub num_rounds_left: UInt16<F>, + pub length_in_bits: UInt32<F>, + pub state_get_from_queue: Boolean<F>, + pub state_decommit: Boolean<F>, + pub finished: Boolean<F>, +} +``` + +## Main circuit 
logic + +This circuit takes a queue of decommit requests from the DecommitSorter circuit. For each decommit request, it checks that +the linear hash of all opcodes is equal to the hash stored in the decommit request. Also, it writes code +to the corresponding memory page. Briefly, it unpacks the queue from the opcode, updates the memory queue, and checks +correctness. + +### First part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/mod.rs#L48) + +The circuit begins with allocating the input part of the PI. + +```rust +let CodeDecommitterCircuitInstanceWitness { + closed_form_input, + sorted_requests_queue_witness, + code_words, +} = witness; + +let mut structured_input = + CodeDecommitterCycleInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone()); +``` + +We choose which `memory_queue` state and `decommitments_queue` state to continue working with. + +```rust +let requests_queue_state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &structured_input + .observable_input + .sorted_requests_queue_initial_state, + &structured_input + .hidden_fsm_input + .decommittment_requests_queue_state, +); + +let memory_queue_state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &structured_input.observable_input.memory_queue_initial_state, + &structured_input.hidden_fsm_input.memory_queue_state, +); +``` + +We do the same with the inner FSM part. + +```rust +let initial_state = CodeDecommittmentFSM::conditionally_select( + cs, + structured_input.start_flag, + &starting_fsm_state, + &structured_input.hidden_fsm_input.internal_fsm, +); +``` + +### Main part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/mod.rs#L168) + +Here’s the part where all the main logic is implemented. Firstly, we take a new decommit request if the queue is not +empty yet. 
+ +```rust +let (may_be_new_request, _) = + unpack_requests_queue.pop_front(cs, state.state_get_from_queue); +``` + +Then we update the state of the circuit. + +```rust +state.num_rounds_left = UInt16::conditionally_select( + cs, + state.state_get_from_queue, + &length_in_rounds, + &state.num_rounds_left, +); +... +``` + +Then we create two write memory queries and push them to memory queue. + +```rust +let mem_query_0 = MemoryQuery { + timestamp: state.timestamp, + memory_page: state.current_page, + index: state.current_index, + rw_flag: boolean_true, + value: code_word_0, + is_ptr: boolean_false, +}; + +let mem_query_1 = MemoryQuery { + timestamp: state.timestamp, + memory_page: state.current_page, + index: state.current_index, + rw_flag: boolean_true, + value: code_word_1, + is_ptr: boolean_false, +}; + +memory_queue.push(cs, mem_query_0, state.state_decommit); +memory_queue.push(cs, mem_query_1, process_second_word); +``` + +Now we create a new input for hash to be absorbed. + +```rust +let mut sha256_input = [zero_u32; 16]; +for (dst, src) in sha256_input.iter_mut().zip( + code_word_0_be_bytes + .array_chunks::<4>() + .chain(code_word_1_be_bytes.array_chunks::<4>()), +) { + *dst = UInt32::from_be_bytes(cs, *src); +} +``` + +And absorb it to current state. + +```rust +let mut new_internal_state = state.sha256_inner_state; +round_function_over_uint32(cs, &mut new_internal_state, &sha256_input); +``` + +Also, we update current state. + +```rust +state.sha256_inner_state = <[UInt32<F>; 8]>::conditionally_select( + cs, + state.state_decommit, + &new_internal_state, + &state.sha256_inner_state, +); +``` + +Finally, we check the hash if necessary. 
+ +```rust +for (part_of_first, part_of_second) in hash + .inner + .iter() + .zip(state.hash_to_compare_against.inner.iter()) +{ + Num::conditionally_enforce_equal( + cs, + finalize, + &part_of_first.into_num(), + &part_of_second.into_num(), + ); +} +``` + +### Final part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/code_unpacker_sha256/mod.rs#L111) + +Now we update PI output parts and compute a commitment. Then we allocate it as public variables. + +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); + +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/30.demux-log-queue.md b/content/10.zk-stack/10.components/60.prover/40.circuits/30.demux-log-queue.md new file mode 100644 index 00000000..8abd4ec0 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/30.demux-log-queue.md @@ -0,0 +1,232 @@ +--- +title: DemuxLogQueue +description: +--- + +## DemuxLogQueue PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/demux_log_queue/input.rs#L49) + +```rust +pub struct LogDemuxerInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L33) + +```rust +pub struct LogDemuxerOutputData<F: SmallField> { + pub storage_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub events_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub l1messages_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub keccak256_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub sha256_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub 
ecrecover_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/demux_log_queue/input.rs#L22) + +```rust +pub struct LogDemuxerFSMInputOutput<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub storage_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub events_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub l1messages_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub keccak256_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub sha256_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub ecrecover_access_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +## Main circuit logic + +The input of Log_Demuxer receives log_queue, consisting of a request to storage, events, L1messages request, and a +request to the precompiles ecrecover, sha256, and keccak256. It divides this queue into six new queues. See our diagram. + +### Start + +The function of circuits is `demultiplex_storage_logs_enty_point`. We start for allocation of queue witnesses: + +```rust +let mut structured_input = + LogDemuxerInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone()); +``` + +Then we must verify that no elements have already been retrieved from the queue: + +```rust +structured_input + .observable_input + .initial_log_queue_state + .enforce_trivial_head(cs); +``` + +So long as `tail` is some equivalent of the merkle tree root and `head` is an equivalent of the current node hash, we +provide some path witness when we pop elements and require that we properly end up in the root. 
So we must prove that each +element of the head is zero: + +```rust +pub fn enforce_trivial_head<CS: ConstraintSystem<F>>(&self, cs: &mut CS) { + let zero_num = Num::zero(cs); + for el in self.head.iter() { + Num::enforce_equal(cs, el, &zero_num); + } +} +``` + +Depending on `start_flag`, we select which queue we take: `observable_input` or `fsm_input` (the internal intermediate queue): + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &structured_input.observable_input.initial_log_queue_state, + &structured_input.hidden_fsm_input.initial_log_queue_state, +); +``` + +Wrap the state and witnesses in `StorageLogQueue`, thereby preparing the input data for the `inner` part: + +```rust +let mut initial_queue = StorageLogQueue::<F, R>::from_state(cs, state); +use std::sync::Arc; +let initial_queue_witness = CircuitQueueWitness::from_inner_witness(initial_queue_witness); +initial_queue.witness = Arc::new(initial_queue_witness); +``` + +For the rest of the queues, we select between an empty state or the one from the FSM: + +```rust +let queue_states_from_fsm = [ +&structured_input.hidden_fsm_input.storage_access_queue_state, +&structured_input.hidden_fsm_input.events_access_queue_state, +&structured_input + .hidden_fsm_input + .l1messages_access_queue_state, +&structured_input + .hidden_fsm_input + .keccak256_access_queue_state, +&structured_input.hidden_fsm_input.sha256_access_queue_state, +&structured_input + .hidden_fsm_input + .ecrecover_access_queue_state, +]; + +let empty_state = QueueState::empty(cs); +let [mut storage_access_queue, mut events_access_queue, mut l1messages_access_queue, mut keccak256_access_queue, mut sha256_access_queue, mut ecrecover_access_queue] = +queue_states_from_fsm.map(|el| { +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &empty_state, + &el, + ); +StorageLogQueue::<F, R>::from_state(cs, state) +}); +``` + +We gather all prepared queues into `input_queues` and call the `inner` part: + +```rust 
+demultiplex_storage_logs_inner(cs, &mut initial_queue, input_queues, limit); +``` + +The last step is to form the final state. The flag `completed` shows us if `initial_queue` is empty or not. If not, we +fill fsm_output. If it is empty, we select observable_output for the different queues. + +Finally, we compute a commitment to PublicInput and allocate it as witness variables. + +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); + +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` + +### Inner part + +This is the logic part of the circuit. It depends on the main queue `storage_log_queue`, which separates the other +queues. After we have dealt with the initial precompile, we need to allocate constant addresses for +`keccak_precompile_address`, `sha256_precompile_address`, `ecrecover_precompile_address` and allocate constants for +`STORAGE_AUX_BYTE`, `EVENT_AUX_BYTE`, `L1_MESSAGE_AUX_BYTE`, `PRECOMPILE_AUX_BYTE`. Execution happens when we pop all +elements from `storage_log_queue`. 
We have appropriate flags for this, which depend on each other: + +```rust +let queue_is_empty = storage_log_queue.is_empty(cs); +let execute = queue_is_empty.negated(cs); +``` + +Here, we choose flags depending on the popped element data: + +```rust +let is_storage_aux_byte = UInt8::equals(cs, &aux_byte_for_storage, &popped.0.aux_byte); +let is_event_aux_byte = UInt8::equals(cs, &aux_byte_for_event, &popped.0.aux_byte); +let is_l1_message_aux_byte = + UInt8::equals(cs, &aux_byte_for_l1_message, &popped.0.aux_byte); +let is_precompile_aux_byte = + UInt8::equals(cs, &aux_byte_for_precompile_call, &popped.0.aux_byte); + +let is_keccak_address = UInt160::equals(cs, &keccak_precompile_address, &popped.0.address); +let is_sha256_address = UInt160::equals(cs, &sha256_precompile_address, &popped.0.address); +let is_ecrecover_address = + UInt160::equals(cs, &ecrecover_precompile_address, &popped.0.address); +``` + +Put up the right flag for shards: + +```rust +let is_rollup_shard = popped.0.shard_id.is_zero(cs); +let is_porter_shard = is_rollup_shard.negated(cs); +``` + +Execute all and push them into output queues: + +```rust +let execute_rollup_storage = Boolean::multi_and(cs, &[is_storage_aux_byte, is_rollup_shard, execute]); +let execute_porter_storage = Boolean::multi_and(cs, &[is_storage_aux_byte, is_porter_shard, execute]); + +let execute_event = Boolean::multi_and(cs, &[is_event_aux_byte, execute]); +let execute_l1_message = Boolean::multi_and(cs, &[is_l1_message_aux_byte, execute]); +let execute_keccak_call = Boolean::multi_and(cs, &[is_precompile_aux_byte, is_keccak_address, execute]); +let execute_sha256_call = Boolean::multi_and(cs, &[is_precompile_aux_byte, is_sha256_address, execute]); +let execute_ecrecover_call = Boolean::multi_and(cs, &[is_precompile_aux_byte, is_ecrecover_address, execute]); + +let bitmask = [ + execute_rollup_storage, + execute_event, + execute_l1_message, + execute_keccak_call, + execute_sha256_call, + execute_ecrecover_call, +]; + 
+push_with_optimize( + cs, + [ + rollup_storage_queue, + events_queue, + l1_messages_queue, + keccak_calls_queue, + sha256_calls_queue, + ecdsa_calls_queue, + ], + bitmask, + popped.0, +); +``` + +Note: since we do not have a porter, the flag is automatically set to `false`: + +```rust +let boolean_false = Boolean::allocated_constant(cs, false); +Boolean::enforce_equal(cs, &execute_porter_storage, &boolean_false); +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/40.ecrecover.md b/content/10.zk-stack/10.components/60.prover/40.circuits/40.ecrecover.md new file mode 100644 index 00000000..bbae2322 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/40.ecrecover.md @@ -0,0 +1,326 @@ +--- +title: ECRecover +description: +--- + +## Ecrecover PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L9) + +```rust +pub struct PrecompileFunctionInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub initial_memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/base_structures/precompile_input_outputs/mod.rs#L42) + +```rust +pub struct PrecompileFunctionOutputData<F: SmallField> { + pub final_memory_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/input.rs#L59) + +```rust +pub struct EcrecoverCircuitFSMInputOutput<F: SmallField> { + pub log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +## Main circuit logic + +This circuit implements the ecrecover precompile described in the Ethereum yellow paper: +<https://ethereum.github.io/yellowpaper/paper.pdf> + +The purpose of ecrecover is to recover the signer’s public key from digital 
signature. + +A special note about this circuit is that there are hardcoded ‘valid’ field element values provided to the circuit. This +is to prevent the circuit from not satisfying in case the user-provided inputs are incorrect and, when the circuit +detects this, the bad values are swapped out for the hardcoded ones. In this event, exceptions are logged and pushed +into a vector which are returned to the caller, informing them that the provided inputs were incorrect and the result +should be discarded. + +Most of the relevant circuit logic resides in the `ecrecover_precompile_inner_routine` function. Let’s take the circuit +step by step. + +1. The circuit starts off by declaring a set of constants which are useful to have throughout the circuit. These include + the B parameter of the secp256k1 curve, the constant -1 in the curve’s base field, and the base field and scalar + field modulus. We also create the vector that should capture any exceptions. + +```rust +let curve_b = Secp256Affine::b_coeff(); + +let mut minus_one = Secp256Fq::one(); +minus_one.negate(); + +let mut curve_b_nn = + Secp256BaseNNField::<F>::allocated_constant(cs, curve_b, &base_field_params); +let mut minus_one_nn = + Secp256BaseNNField::<F>::allocated_constant(cs, minus_one, &base_field_params); + +let secp_n_u256 = U256([ + scalar_field_params.modulus_u1024.as_ref().as_words()[0], + scalar_field_params.modulus_u1024.as_ref().as_words()[1], + scalar_field_params.modulus_u1024.as_ref().as_words()[2], + scalar_field_params.modulus_u1024.as_ref().as_words()[3], +]); +let secp_n_u256 = UInt256::allocated_constant(cs, secp_n_u256); + +let secp_p_u256 = U256([ + base_field_params.modulus_u1024.as_ref().as_words()[0], + base_field_params.modulus_u1024.as_ref().as_words()[1], + base_field_params.modulus_u1024.as_ref().as_words()[2], + base_field_params.modulus_u1024.as_ref().as_words()[3], +]); +let secp_p_u256 = UInt256::allocated_constant(cs, secp_p_u256); + +let mut exception_flags = 
ArrayVec::<_, EXCEPTION_FLAGS_ARR_LEN>::new(); +``` + +1. Next, the circuit checks whether or not the given `x` input (which is the x-coordinate of the signature) falls within + the scalar field of the curve. Since, in ecrecover, `x = r + kn`, almost any `r` will encode a unique x-coordinate, + except for when `r > scalar_field_modulus`. If this is the case, `x = r + n`, otherwise, `x = r`. `x` is recovered + here from `r`. + +```rust +let [y_is_odd, x_overflow, ..] = + Num::<F>::from_variable(recid.get_variable()).spread_into_bits::<_, 8>(cs); + +let (r_plus_n, of) = r.overflowing_add(cs, &secp_n_u256); +let mut x_as_u256 = UInt256::conditionally_select(cs, x_overflow, &r_plus_n, &r); +let error = Boolean::multi_and(cs, &[x_overflow, of]); +exception_flags.push(error); + +// we handle x separately as it is the only element of base field of a curve (not a scalar field element!) +// check that x < q - order of base point on Secp256 curve +// if it is not actually the case - mask x to be zero +let (_res, is_in_range) = x_as_u256.overflowing_sub(cs, &secp_p_u256); +x_as_u256 = x_as_u256.mask(cs, is_in_range); +let x_is_not_in_range = is_in_range.negated(cs); +exception_flags.push(x_is_not_in_range); +``` + +1. Then, all field elements are interpreted as such within the circuit. As they are passed in, they are simply byte + arrays which are interpreted initially as `UInt256` numbers. These get converted to field elements by using the + conversion functions defined near the top of the file. Additionally, checks are done to make sure none of the passed + in field elements are zero. 
+ +```rust +let mut x_fe = convert_uint256_to_field_element(cs, &x_as_u256, &base_field_params); + +let (mut r_fe, r_is_zero) = + convert_uint256_to_field_element_masked(cs, &r, &scalar_field_params); +exception_flags.push(r_is_zero); +let (mut s_fe, s_is_zero) = + convert_uint256_to_field_element_masked(cs, &s, &scalar_field_params); +exception_flags.push(s_is_zero); + +// NB: although it is not strictly an exception we also assume that hash is never zero as field element +let (mut message_hash_fe, message_hash_is_zero) = + convert_uint256_to_field_element_masked(cs, &message_hash, &scalar_field_params); +exception_flags.push(message_hash_is_zero); +``` + +1. Now we are going to compute `t` and check whether or not it is quadratic residue in the base field. To start, we take + `x` which we calculated before, and calculate `t` by doing `x^3 + b`, where `b` is the B parameter of the secp256k1 + curve. We check to make sure that `t` is not zero. + +```rust +let mut t = x_fe.square(cs); // x^2 +t = t.mul(cs, &mut x_fe); // x^3 +t = t.add(cs, &mut curve_b_nn); // x^3 + b + +let t_is_zero = t.is_zero(cs); +exception_flags.push(t_is_zero); +``` + +1. The Legendre symbol for `t` is computed to do a quadratic residue check. We need to compute `t^b` which corresponds + to `t^{2^255} / ( t^{2^31} * t^{2^8} * t^{2^7} * t^{2^6} * t^{2^5} * t^{2^3} * t)`. First, an array of powers of `t` + is created (up to `t^255`). Then, we multiply together all the elements in the denominator of the equation, which are + `t^{2^31} * t^{2^8} * t^{2^7} * t^{2^6} * t^{2^5} * t^{2^3} * t`. Lastly, the division is performed and we end up + with `t^b`. 
+ +```rust +let t_is_zero = t.is_zero(cs); // We first do a zero check +exception_flags.push(t_is_zero); + +// if t is zero then just mask +let t = Selectable::conditionally_select(cs, t_is_zero, &valid_t_in_external_field, &t); + +// array of powers of t of the form t^{2^i} starting from i = 0 to 255 +let mut t_powers = Vec::with_capacity(X_POWERS_ARR_LEN); +t_powers.push(t); + +for _ in 1..X_POWERS_ARR_LEN { + let prev = t_powers.last_mut().unwrap(); + let next = prev.square(cs); + t_powers.push(next); +} + +let mut acc = t_powers[0].clone(); +for idx in [3, 5, 6, 7, 8, 31].into_iter() { + let other = &mut t_powers[idx]; + acc = acc.mul(cs, other); +} +let mut legendre_symbol = t_powers[255].div_unchecked(cs, &mut acc); +``` + +1. Before we proceed to the quadratic residue check, we take advantage of the powers we just calculated to compute the + square root of `t`, in order to determine whether the y-coordinate of the signature we’ve passed is positive or + negative. + +```rust +let mut acc_2 = t_powers[2].clone(); +for idx in [4, 5, 6, 7, 30].into_iter() { + let other = &mut t_powers[idx]; + acc_2 = acc_2.mul(cs, other); +} + +let mut may_be_recovered_y = t_powers[254].div_unchecked(cs, &mut acc_2); +may_be_recovered_y.normalize(cs); +let mut may_be_recovered_y_negated = may_be_recovered_y.negated(cs); +may_be_recovered_y_negated.normalize(cs); + +let [lowest_bit, ..] = + Num::<F>::from_variable(may_be_recovered_y.limbs[0]).spread_into_bits::<_, 16>(cs); + +// if lowest bit != parity bit, then we need conditionally select +let should_swap = lowest_bit.xor(cs, y_is_odd); +let may_be_recovered_y = Selectable::conditionally_select( + cs, + should_swap, + &may_be_recovered_y_negated, + &may_be_recovered_y, +); +``` + +1. Then, proceed with the quadratic residue check. In case `t` is nonresidue, we swap out our inputs for the hardcoded + ‘valid’ inputs. 
+ +```rust +let t_is_nonresidue = + Secp256BaseNNField::<F>::equals(cs, &mut legendre_symbol, &mut minus_one_nn); +exception_flags.push(t_is_nonresidue); +// unfortunately, if t is found to be a quadratic nonresidue, we can't simply let x to be zero, +// because then t_new = 7 is again a quadratic nonresidue. So, in this case we let x to be 9, then +// t = 16 is a quadratic residue +let x = + Selectable::conditionally_select(cs, t_is_nonresidue, &valid_x_in_external_field, &x_fe); +let y = Selectable::conditionally_select( + cs, + t_is_nonresidue, + &valid_y_in_external_field, + &may_be_recovered_y, +); +``` + +1. The next step is computing the public key. We compute the public key `Q` by calculating `Q = (s * X - hash * G) / r`. + We can simplify this in-circuit by calculating `s / r` and `hash / r` separately, and then doing an MSM to get the + combined output. First, we pre-compute these divided field elements, and then compute the point like so: + +```rust +let mut r_fe_inversed = r_fe.inverse_unchecked(cs); +let mut s_by_r_inv = s_fe.mul(cs, &mut r_fe_inversed); +let mut message_hash_by_r_inv = message_hash_fe.mul(cs, &mut r_fe_inversed); + +s_by_r_inv.normalize(cs); +message_hash_by_r_inv.normalize(cs); + +let mut gen_negated = Secp256Affine::one(); +gen_negated.negate(); +let (gen_negated_x, gen_negated_y) = gen_negated.into_xy_unchecked(); +let gen_negated_x = + Secp256BaseNNField::allocated_constant(cs, gen_negated_x, base_field_params); +let gen_negated_y = + Secp256BaseNNField::allocated_constant(cs, gen_negated_y, base_field_params); + +let s_by_r_inv_normalized_lsb_bits: Vec<_> = s_by_r_inv + .limbs + .iter() + .map(|el| Num::<F>::from_variable(*el).spread_into_bits::<_, 16>(cs)) + .flatten() + .collect(); +let message_hash_by_r_inv_lsb_bits: Vec<_> = message_hash_by_r_inv + .limbs + .iter() + .map(|el| Num::<F>::from_variable(*el).spread_into_bits::<_, 16>(cs)) + .flatten() + .collect(); + +let mut recovered_point = (x, y); +let mut generator_point = 
(gen_negated_x, gen_negated_y); +// now we do multiexponentiation +let mut q_acc = + SWProjectivePoint::<F, Secp256Affine, Secp256BaseNNField<F>>::zero(cs, base_field_params); + +// we should start from MSB, double the accumulator, then conditionally add +for (cycle, (x_bit, hash_bit)) in s_by_r_inv_normalized_lsb_bits + .into_iter() + .rev() + .zip(message_hash_by_r_inv_lsb_bits.into_iter().rev()) + .enumerate() +{ + if cycle != 0 { + q_acc = q_acc.double(cs); + } + let q_plus_x = q_acc.add_mixed(cs, &mut recovered_point); + let mut q_0: SWProjectivePoint<F, Secp256Affine, NonNativeFieldOverU16<F, Secp256Fq, 17>> = + Selectable::conditionally_select(cs, x_bit, &q_plus_x, &q_acc); + + let q_plux_gen = q_0.add_mixed(cs, &mut generator_point); + let q_1 = Selectable::conditionally_select(cs, hash_bit, &q_plux_gen, &q_0); + + q_acc = q_1; +} + +let ((mut q_x, mut q_y), is_infinity) = + q_acc.convert_to_affine_or_default(cs, Secp256Affine::one()); +exception_flags.push(is_infinity); +let any_exception = Boolean::multi_or(cs, &exception_flags[..]); + +q_x.normalize(cs); +q_y.normalize(cs); +``` + +1. Now that we have our public key recovered, the last thing we will need to do is take the keccak hash of the public + key and then take the first 20 bytes to recover the address. + +```rust +let zero_u8 = UInt8::zero(cs); + +let mut bytes_to_hash = [zero_u8; 64]; +let it = q_x.limbs[..16] + .iter() + .rev() + .chain(q_y.limbs[..16].iter().rev()); + +for (dst, src) in bytes_to_hash.array_chunks_mut::<2>().zip(it) { + let limb = unsafe { UInt16::from_variable_unchecked(*src) }; + *dst = limb.to_be_bytes(cs); +} + +let mut digest_bytes = keccak256(cs, &bytes_to_hash); +// digest is 32 bytes, but we need only 20 to recover address +digest_bytes[0..12].copy_from_slice(&[zero_u8; 12]); // empty out top bytes +digest_bytes.reverse(); +``` + +1. At this point, we are basically done! 
What’s left now is to ensure we send a masked value in case of any exception, + and then we can output the resulting address and any exceptions which occurred for the caller to handle. This wraps + up the ecrecover circuit! + +```rust +let written_value_unmasked = UInt256::from_le_bytes(cs, digest_bytes); + +let written_value = written_value_unmasked.mask_negated(cs, any_exception); +let all_ok = any_exception.negated(cs); + +(all_ok, written_value) // Return any exceptions and the resulting address value +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/50.keccak-round-function.md b/content/10.zk-stack/10.components/60.prover/40.circuits/50.keccak-round-function.md new file mode 100644 index 00000000..883ef4f7 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/50.keccak-round-function.md @@ -0,0 +1,212 @@ +--- +title: KeccakRoundFunction +description: +--- + +## KeccakRoundFunction PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L9) + +```rust +pub struct PrecompileFunctionInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub initial_memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/base_structures/precompile_input_outputs/mod.rs#L42) + +```rust +pub struct PrecompileFunctionOutputData<F: SmallField> { + pub final_memory_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/input.rs#L59) + +```rust +pub struct Keccak256RoundFunctionFSMInputOutput<F: SmallField> { + pub internal_fsm: Keccak256RoundFunctionFSM<F>, + pub log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub memory_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} + +pub struct Keccak256RoundFunctionFSM<F: 
SmallField> { + pub read_precompile_call: Boolean<F>, + pub read_unaligned_words_for_round: Boolean<F>, + pub completed: Boolean<F>, + pub keccak_internal_state: [[[UInt8<F>; BYTES_PER_WORD]; LANE_WIDTH]; LANE_WIDTH], + pub timestamp_to_use_for_read: UInt32<F>, + pub timestamp_to_use_for_write: UInt32<F>, + pub precompile_call_params: Keccak256PrecompileCallParams<F>, + pub u8_words_buffer: [UInt8<F>; BYTES_BUFFER_SIZE], + pub u64_words_buffer_markers: [Boolean<F>; BUFFER_SIZE_IN_U64_WORDS], +} +``` + +## Main circuit logic + +Keccak is a precompile for the keccak hash function, and is responsible for hashing any input data sent in by contract +executions. Roughly speaking, the keccak circuit will receive metadata about queued up precompile calls, and ensure that +the first-in-line call is indeed a call to the keccak precompile. The circuit then collects some metadata about the call +itself, which tells the circuit at which memory position the input can be found, and at which memory position the output +should be written, along with some peripheral data like the timestamp of the hash. + +Next, the circuit will take data from another queue, which contains memory queries. This will give the circuit witnesses +to push into the keccak buffer. + +Learn more about Keccak here: <https://keccak.team/keccak.html>. + +### First part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/mod.rs#L423) + +The circuit begins with allocating input part of the PI. + +```rust +let Keccak256RoundFunctionCircuitInstanceWitness { + closed_form_input, + requests_queue_witness, + memory_reads_witness, +} = witness; + +let mut structured_input = Keccak256RoundFunctionCircuitInputOutput::alloc_ignoring_outputs( + cs, + closed_form_input.clone(), +); +``` + +We chose what `memory_queue` state and `log_queue` state to continue to work with. 
+ +```rust +let requests_queue_state = QueueState::conditionally_select( + cs, + start_flag, + &requests_queue_state_from_input, + &requests_queue_state_from_fsm, +); + +let memory_queue_state = QueueState::conditionally_select( + cs, + start_flag, + &memory_queue_state_from_input, + &memory_queue_state_from_fsm, +); +``` + +We do the same with inner FSM part. + +```rust +let initial_state = Keccak256RoundFunctionFSM::conditionally_select( + cs, + start_flag, + &starting_fsm_state, + &structured_input.hidden_fsm_input.internal_fsm, +); +``` + +### Main part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/mod.rs#L114) + +Our main cycle starts with getting a new precompile request from the queue. + +```rust +let (precompile_call, _) = precompile_calls_queue.pop_front(cs, state.read_precompile_call); +``` + +We check that fields are correct. + +```rust +Num::conditionally_enforce_equal( + cs, + state.read_precompile_call, + &Num::from_variable(precompile_call.aux_byte.get_variable()), + &Num::from_variable(aux_byte_for_precompile.get_variable()), +); +for (a, b) in precompile_call + .address + .inner + .iter() + .zip(precompile_address.inner.iter()) +{ + Num::conditionally_enforce_equal( + cs, + state.read_precompile_call, + &Num::from_variable(a.get_variable()), + &Num::from_variable(b.get_variable()), + ); +} +``` + +Also, we prepare some additional information for the call. + +```rust +state.precompile_call_params = Keccak256PrecompileCallParams::conditionally_select( + cs, + state.read_precompile_call, + &call_params, + &state.precompile_call_params, +); +... +``` + +Then we do some memory queries to read data that needed to be hashed. 
+ +```rust +let read_query = MemoryQuery { + timestamp: state.timestamp_to_use_for_read, + memory_page: state.precompile_call_params.input_page, + index: state.precompile_call_params.input_offset, + rw_flag: boolean_false, + is_ptr: boolean_false, + value: read_query_value, +}; + +memory_queue.push(cs, read_query, should_read); +``` + +After some another preparations, we are ready to create a full input. + +```rust +let mut input = [zero_u8; keccak256::KECCAK_RATE_BYTES]; + input.copy_from_slice(&state.u8_words_buffer[..keccak256::KECCAK_RATE_BYTES]); +``` + +And run the round function. + +```rust +let squeezed = + keccak256_absorb_and_run_permutation(cs, &mut state.keccak_internal_state, &input); +``` + +Now, if it was the last round, we can make a write memory query of the result. + +```rust +let write_query = MemoryQuery { + timestamp: state.timestamp_to_use_for_write, + memory_page: state.precompile_call_params.output_page, + index: state.precompile_call_params.output_offset, + rw_flag: boolean_true, + is_ptr: boolean_false, + value: result, +}; + +memory_queue.push(cs, write_query, write_result); +``` + +### Final part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/keccak256_round_function/mod.rs#L495) + +Now we update PI output parts and compute a commitment. Then we allocate it as public variables. 
+ +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/60.l1-messages-hasher.md b/content/10.zk-stack/10.components/60.prover/40.circuits/60.l1-messages-hasher.md new file mode 100644 index 00000000..05cb7a90 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/60.l1-messages-hasher.md @@ -0,0 +1,157 @@ +--- +title: L1MessagesHasher +description: +--- + +## L1MessagesHasher PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/input.rs#L27) + +```rust +pub struct LinearHasherInputData<F: SmallField> { + pub queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/input.rs#L42) + +```rust +pub struct LinearHasherOutputData<F: SmallField> { + pub keccak256_hash: [UInt8<F>; 32], +} +``` + +### FSM Input and FSM Output + +```rust +() // this circuit has big capacity, so we don't need several instances +``` + +## Main circuit logic + +It takes a queue of L1 messages and hash everything with keccak. + +The main logic is implemented in `linear_hasher_entry_point` function +[here](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/mod.rs#L35). 
+
+It can be split into 3 parts:
+
+### First part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/mod.rs#L54)
+
+Firstly, we allocate the “input” part of PI (`start flag`, `Input` and `FSM Input`):
+
+```rust
+let mut structured_input =
+    LinearHasherInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone());
+
+let start_flag = structured_input.start_flag;
+let queue_state_from_input = structured_input.observable_input.queue_state;
+
+let mut queue = StorageLogQueue::<F, R>::from_state(cs, queue_state_from_input);
+let queue_witness = CircuitQueueWitness::from_inner_witness(queue_witness);
+queue.witness = Arc::new(queue_witness);
+```
+
+Also, we do some checks for them and allocate an empty hash state:
+
+```rust
+let keccak_accumulator_state =
+    [[[zero_u8; keccak256::BYTES_PER_WORD]; keccak256::LANE_WIDTH]; keccak256::LANE_WIDTH];
+
+let mut keccak_accumulator_state =
+    keccak_accumulator_state.map(|el| el.map(|el| el.map(|el| el.get_variable())));
+```
+
+### Main part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/mod.rs#L105)
+
+This part is the main one. We run a loop with some limit, where on each iteration we try to pop the next element from
+the queue, if it’s not empty.
+
+```rust
+let queue_is_empty = queue.is_empty(cs);
+let should_pop = queue_is_empty.negated(cs);
+
+let (storage_log, _) = queue.pop_front(cs, should_pop);
+```
+
+Then we absorb it into the buffer, and if the buffer is full we run a round function. 
+ +```rust +if buffer.len() >= 136 { + let buffer_for_round: [UInt8<F>; KECCAK_RATE_BYTES] = buffer[..136].try_into().unwrap(); + let buffer_for_round = buffer_for_round.map(|el| el.get_variable()); + let carry_on = buffer[136..].to_vec(); + + buffer = carry_on; + + // absorb if we are not done yet + keccak256_conditionally_absorb_and_run_permutation( + cs, + continue_to_absorb, + &mut keccak_accumulator_state, + &buffer_for_round, + ); +} +``` + +If this element was the last one, we create a padding and run a round function. + +```rust +if tail_len == KECCAK_RATE_BYTES - 1 { + // unreachable, but we set it for completeness + last_round_buffer[tail_len] = UInt8::allocated_constant(cs, 0x81); +} else { + last_round_buffer[tail_len] = UInt8::allocated_constant(cs, 0x01); + last_round_buffer[KECCAK_RATE_BYTES - 1] = UInt8::allocated_constant(cs, 0x80); +} + +let last_round_buffer = last_round_buffer.map(|el| el.get_variable()); + +// absorb if it's the last round +keccak256_conditionally_absorb_and_run_permutation( + cs, + absorb_as_last_round, + &mut keccak_accumulator_state, + &last_round_buffer, +); +``` + +### Final part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/linear_hasher/mod.rs#L169) + +Firstly, we verify that the queue is empty now. + +```rust +let completed = queue.is_empty(cs); +Boolean::enforce_equal(cs, &completed, &boolean_true); +``` + +Then we compute the final hash and create an output. 
+ +```rust +// squeeze +let mut keccak256_hash = [MaybeUninit::<UInt8<F>>::uninit(); keccak256::KECCAK256_DIGEST_SIZE]; +for (i, dst) in keccak256_hash.array_chunks_mut::<8>().enumerate() { + for (dst, src) in dst.iter_mut().zip(keccak_accumulator_state[i][0].iter()) { + let tmp = unsafe { UInt8::from_variable_unchecked(*src) }; + dst.write(tmp); + } +} + +let mut observable_output = LinearHasherOutputData::placeholder(cs); +observable_output.keccak256_hash = keccak256_hash; +``` + +Finally, we compute a commitment to PI and allocate it as witness variables. + +```rust +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/70.log-sorter.md b/content/10.zk-stack/10.components/60.prover/40.circuits/70.log-sorter.md new file mode 100644 index 00000000..742d5203 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/70.log-sorter.md @@ -0,0 +1,357 @@ +--- +title: LogSorter +description: +--- + +`LogSorter` is one circuit that is used as both `EventsSorter` and `L1MessagesSorter`. 
+ +## LogSorter PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L57) + +```rust +pub struct EventsDeduplicatorInputData<F: SmallField> { + pub initial_log_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L74) + +```rust +pub struct EventsDeduplicatorOutputData<F: SmallField> { + pub final_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/log_sorter/input.rs#L28) + +```rust +pub struct EventsDeduplicatorFSMInputOutput<F: SmallField> { + pub lhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub rhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub initial_unsorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub intermediate_sorted_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub final_result_queue_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub previous_key: UInt32<F>, + pub previous_item: LogQuery<F>, +} +``` + +## Main circuit logic + +The main logic of this circuit is sorting and deduplicating logs from `initial_log_queue_state`. The result is pushed to +`final_queue_state`. + +With sorting, we get 2 queues – a simple one, and a sorted one. + +We start with the witness allocation: + +```rust +let mut structured_input = + EventsDeduplicatorInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone()); +``` + +Now the scheme is familiar. 
+ +Check if we didn't take elements from the queue: + +```rust +unsorted_queue_from_passthrough_state.enforce_trivial_head(cs); +``` + +Judging by the flag, we choose a queue: + +```rust +let state = QueueState::conditionally_select( + cs, + structured_input.start_flag, + &unsorted_queue_from_passthrough_state, + &unsorted_queue_from_fsm_input_state, + ); +``` + +Wrap the state and witnesses for it in `StorageLogQueue`, thereby preparing the input data for `inner`: + +```rust +let mut unsorted_queue = StorageLogQueue::<F, R>::from_state(cs, state); + + use std::sync::Arc; + let initial_queue_witness = CircuitQueueWitness::from_inner_witness(initial_queue_witness); + unsorted_queue.witness = Arc::new(initial_queue_witness); + + let intermediate_sorted_queue_from_passthrough_state = structured_input + .observable_input + .intermediate_sorted_queue_state; +``` + +For `sorted_queue`, it is the same procedure. + +We generate challenges and accumulators for the permutation argument. A detailed explanation can be found +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). 
+ +```rust +let challenges = crate::utils::produce_fs_challenges::< + F, + CS, + R, + QUEUE_STATE_WIDTH, + { MEMORY_QUERY_PACKED_WIDTH + 1 }, + DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS, + >( + cs, + structured_input + .observable_input + .initial_log_queue_state + .tail, + structured_input + .observable_input + .intermediate_sorted_queue_state + .tail, + round_function, + ); +``` + +Again, if it is not the rest cycle (`start_flag == false`), we should choose fsm: + +```rust +let one = Num::allocated_constant(cs, F::ONE); +let initial_lhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.lhs_accumulator, +); + +let initial_rhs = Num::parallel_select( + cs, + structured_input.start_flag, + &[one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &structured_input.hidden_fsm_input.rhs_accumulator, +); +``` + +Depending on the flag, we prepare all the information for `inner` part: + +```rust +let zero_u32 = UInt32::zero(cs); +let previous_key = UInt32::conditionally_select( + cs, + structured_input.start_flag, + &zero_u32, + &structured_input.hidden_fsm_input.previous_key, +); +``` + +```rust +let empty_storage = LogQuery::placeholder(cs); +let previous_item = LogQuery::conditionally_select( + cs, + structured_input.start_flag, + &empty_storage, + &structured_input.hidden_fsm_input.previous_item, +); +``` + +After `inner` part we check `unsorted_queue` and `intermediate_sorted_queue`.: + +```rust +let unsorted_is_empty = unsorted_queue.is_empty(cs); +let sorted_is_empty = intermediate_sorted_queue.is_empty(cs); + +Boolean::enforce_equal(cs, &unsorted_is_empty, &sorted_is_empty); +``` + +We check that permutation accumulators are equal and if the queues are already empty: + +```rust +let completed = unsorted_queue.length.is_zero(cs); + for (lhs, rhs) in new_lhs.iter().zip(new_rhs.iter()) { + Num::conditionally_enforce_equal(cs, completed, lhs, rhs); + } +``` + 
+
+Finally, we compute a commitment to PublicInput and allocate it as witness variables.
+
+```rust
+let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function);
+for el in input_commitment.iter() {
+    let gate = PublicInputGate::new(el.get_variable());
+    gate.add_to_cs(cs);
+}
+```
+
+### Inner part
+
+Note: we have specific logic for rollbacks. When some function emits an event and that function is later rolled back,
+we should cancel this event. Inside the VM, we create exactly the same event: same key, block number, timestamp,
+etc. The only change is that the rollback flag is now true. In the inner part, we first sort, look for such pairs,
+and cancel them out.
+
+There are two cases: either `unsorted_queue` is empty from the start, in which case there is no work for this circuit,
+or it is non-empty and we continue with the non-trivial logic.
+
+```rust
+let no_work = unsorted_queue.is_empty(cs);
+let mut previous_is_trivial = Boolean::multi_or(cs, &[no_work, is_start]);
+```
+
+Additional checks for length. We should always check whether the sorted queue and the normal queue are of the same
+length.
+
+```rust
+let unsorted_queue_length = Num::from_variable(unsorted_queue.length.get_variable());
+let intermediate_sorted_queue_length =
+    Num::from_variable(intermediate_sorted_queue.length.get_variable());
+
+Num::enforce_equal(
+    cs,
+    &unsorted_queue_length,
+    &intermediate_sorted_queue_length,
+);
+```
+
+We can only pop elements while `unsorted_queue` is not empty. That’s why on every iteration we set up the flags
+`original_is_empty` and `sorted_is_empty`. We also ensure that items are "write" unless it's a padding. 
+ +```rust +let original_is_empty = unsorted_queue.is_empty(cs); +let sorted_is_empty = intermediate_sorted_queue.is_empty(cs); +Boolean::enforce_equal(cs, &original_is_empty, &sorted_is_empty); + +let should_pop = original_is_empty.negated(cs); +let is_trivial = original_is_empty; + +let (_, original_encoding) = unsorted_queue.pop_front(cs, should_pop); +let (sorted_item, sorted_encoding) = intermediate_sorted_queue.pop_front(cs, should_pop); +``` + +The next block of code is sorting. You can find the main idea +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). + +Check if keys are equal and check a value. We compare timestamps and then resolve logic over rollbacks, so the only way +when keys are equal can be when we do a rollback. Ensure sorting for uniqueness timestamp and rollback flag. We know +that timestamps are unique across logs, and are also the same between write and rollback. Keys are always ordered no +matter what, and are never equal unless it's padding: + +```rust +let sorting_key = sorted_item.timestamp; +let (keys_are_equal, new_key_is_smaller) = + unpacked_long_comparison(cs, &[previous_key], &[sorting_key]); +new_key_is_smaller.conditionally_enforce_false(cs, should_pop); +``` + +There are only two cases when keys are equal: + +- it's a padding element +- it's a rollback + +It's enough to compare timestamps, as the VM circuit guarantees uniqueness if it's not a padding. 
Now ensure sorting: + +```rust +let previous_is_not_rollback = previous_item.rollback.negated(cs); +let enforce_sequential_rollback = Boolean::multi_and( + cs, + &[previous_is_not_rollback, sorted_item.rollback, should_pop], +); +keys_are_equal.conditionally_enforce_true(cs, enforce_sequential_rollback); + +let same_log = UInt32::equals(cs, &sorted_item.timestamp, &previous_item.timestamp); + +let values_are_equal = + UInt256::equals(cs, &sorted_item.written_value, &previous_item.written_value); + +let negate_previous_is_trivial = previous_is_trivial.negated(cs); +let should_enforce = Boolean::multi_and(cs, &[same_log, negate_previous_is_trivial]); + +values_are_equal.conditionally_enforce_true(cs, should_enforce); + +let this_item_is_non_trivial_rollback = + Boolean::multi_and(cs, &[sorted_item.rollback, should_pop]); +let negate_previous_item_rollback = previous_item.rollback.negated(cs); +let previous_item_is_non_trivial_write = Boolean::multi_and( + cs, + &[negate_previous_item_rollback, negate_previous_is_trivial], +); +let is_sequential_rollback = Boolean::multi_and( + cs, + &[ + this_item_is_non_trivial_rollback, + previous_item_is_non_trivial_write, + ], +); +same_log.conditionally_enforce_true(cs, is_sequential_rollback); +``` + +Decide if we should add the previous into the queue. 
We add only if the previous one is not trivial, it had a different +key, and it wasn't rolled back: + +```rust +let negate_same_log = same_log.and(cs, should_pop).negated(cs); +let add_to_the_queue = Boolean::multi_and( + cs, + &[ + negate_previous_is_trivial, + negate_same_log, + negate_previous_item_rollback, + ], +); +``` + +Further, we don't need in our `LogQueue` some fields, so we just clean up: + +```rust +let boolean_false = Boolean::allocated_constant(cs, false); +let query_to_add = LogQuery { + address: previous_item.address, + key: previous_item.key, + read_value: UInt256::zero(cs), + written_value: previous_item.written_value, + rw_flag: boolean_false, + aux_byte: UInt8::zero(cs), + rollback: boolean_false, + is_service: previous_item.is_service, + shard_id: previous_item.shard_id, + tx_number_in_block: previous_item.tx_number_in_block, + timestamp: UInt32::zero(cs), +}; +``` + +Finalization step - same way, check if the last item is not a rollback: + +```rust +let now_empty = unsorted_queue.is_empty(cs); + +let negate_previous_is_trivial = previous_is_trivial.negated(cs); +let negate_previous_item_rollback = previous_item.rollback.negated(cs); +let add_to_the_queue = Boolean::multi_and( + cs, + &[ + negate_previous_is_trivial, + negate_previous_item_rollback, + now_empty, + ], +); +let boolean_false = Boolean::allocated_constant(cs, false); +let query_to_add = LogQuery { + address: previous_item.address, + key: previous_item.key, + read_value: UInt256::zero(cs), + written_value: previous_item.written_value, + rw_flag: boolean_false, + aux_byte: UInt8::zero(cs), + rollback: boolean_false, + is_service: previous_item.is_service, + shard_id: previous_item.shard_id, + tx_number_in_block: previous_item.tx_number_in_block, + timestamp: UInt32::zero(cs), +}; + +result_queue.push(cs, query_to_add, add_to_the_queue); + +unsorted_queue.enforce_consistency(cs); +intermediate_sorted_queue.enforce_consistency(cs); +``` diff --git 
a/content/10.zk-stack/10.components/60.prover/40.circuits/80.main-vm.md b/content/10.zk-stack/10.components/60.prover/40.circuits/80.main-vm.md new file mode 100644 index 00000000..f51fe9cd --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/80.main-vm.md @@ -0,0 +1,348 @@ +--- +title: Main VM +description: +--- + +## MainVm PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L9) + +```rust +pub struct VmInputData<F: SmallField> { + pub rollback_queue_tail_for_block: [Num<F>; QUEUE_STATE_WIDTH], + pub memory_queue_initial_state: QueueTailState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub decommitment_queue_initial_state: QueueTailState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub per_block_context: GlobalContext<F>, +} +``` + +### Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/fsm_input_output/circuit_inputs/main_vm.rs#L33) + +```rust +pub struct VmOutputData<F: SmallField> { + pub log_queue_final_state: QueueState<F, QUEUE_STATE_WIDTH>, + pub memory_queue_final_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub decommitment_queue_final_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, +} +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/base_structures/vm_state/mod.rs#L92) + +```rust +pub struct VmLocalState<F: SmallField> { + pub previous_code_word: UInt256<F>, + pub registers: [VMRegister<F>; REGISTERS_COUNT], + pub flags: ArithmeticFlagsPort<F>, + pub timestamp: UInt32<F>, + pub memory_page_counter: UInt32<F>, + pub tx_number_in_block: UInt32<F>, + pub previous_code_page: UInt32<F>, + pub previous_super_pc: UInt16<F>, + pub pending_exception: Boolean<F>, + pub ergs_per_pubdata_byte: UInt32<F>, + pub callstack: Callstack<F>, + pub memory_queue_state: [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH], + pub memory_queue_length: UInt32<F>, + pub code_decommittment_queue_state: [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH], + 
pub code_decommittment_queue_length: UInt32<F>,
+    pub context_composite_u128: [UInt32<F>; 4],
+}
+```
+
+## Main circuit logic
+
+Main_vm is the instruction handler. The VM circuit only accumulates memory queries, using the WITNESS provided by the
+(presumably honest) prover. In this sense the VM is “local” - it doesn’t have access to the full memory space, but
+only to the values of the particular queries that it encountered during the execution. The RAM circuit sorts all the
+queries accumulated by the VM and ENFORCES the general RAM validity as described above. Those two actions together
+guarantee RAM validity, so in all the descriptions below, when we talk about particular opcodes in the VM, we will use
+language like “Operand number 0 is read from the stack at the offset X”. This means that even though such a “memory
+read” technically uses a witness provided by the prover, in practice we can assume that the witness is correct and we
+can view it as a normal RAM access, as one would expect on a standard machine. 
+ +We start with the allocation witnesses: + +```rust +let VmCircuitWitness { + closed_form_input, + witness_oracle, + } = witness; + + let mut structured_input = + VmCircuitInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone()); + + let start_flag = structured_input.start_flag; + let observable_input = structured_input.observable_input.clone(); + let hidden_fsm_input = structured_input.hidden_fsm_input.clone(); + + let VmInputData { + rollback_queue_tail_for_block, + memory_queue_initial_state, + decommitment_queue_initial_state, + per_block_context, + } = observable_input; +``` + +We also need to create the state that reflects the "initial" state for boot process: + +```rust +let bootloader_state = initial_bootloader_state( + cs, + memory_queue_initial_state.length, + memory_queue_initial_state.tail, + decommitment_queue_initial_state.length, + decommitment_queue_initial_state.tail, + rollback_queue_tail_for_block, + round_function, + ); +``` + +but depending from `start_flag` we should select between states: + +```rust +let mut state = + VmLocalState::conditionally_select(cs, start_flag, &bootloader_state, &hidden_fsm_input); + +let synchronized_oracle = SynchronizedWitnessOracle::new(witness_oracle); +``` + +Here we run the `vm_cycle` : + +```rust +for _cycle_idx in 0..limit { + state = vm_cycle( + cs, + state, + &synchronized_oracle, + &per_block_context, + round_function, + ); + } +``` + +The VM runs in cycles. For each cycle, + +1. Start in a prestate - perform all common operations for every opcode, namely deal with exceptions, resources, edge + cases like end of execution, select opcodes, compute common values. Within the zkEVM framework, numerous entities + identified as "opcodes" in the EVM paradigm are elegantly manifested as mere function calls. 
This modification is
+rooted in the succinct observation that, from the perspective of an external caller, an inlined function (analogous
+to an opcode) is inherently indistinguishable from an internal function call.
+
+```rust
+let (draft_next_state, common_opcode_state, opcode_carry_parts) =
+    create_prestate(cs, current_state, witness_oracle, round_function);
+```
+
+1. Compute state diffs for every opcode. List of opcodes:
+
+```rust
+pub enum Opcode {
+    Invalid(InvalidOpcode),
+    Nop(NopOpcode),
+    Add(AddOpcode),
+    Sub(SubOpcode),
+    Mul(MulOpcode),
+    Div(DivOpcode),
+    Jump(JumpOpcode),
+    Context(ContextOpcode),
+    Shift(ShiftOpcode),
+    Binop(BinopOpcode),
+    Ptr(PtrOpcode),
+    NearCall(NearCallOpcode),
+    Log(LogOpcode),
+    FarCall(FarCallOpcode),
+    Ret(RetOpcode),
+    UMA(UMAOpcode),
+}
+```
+
+The VM cycle calls such functions for the different classes of opcodes: nop, add_sub, jump, binop, context, ptr, log,
+calls_and_ret, mul_div.
+
+Here we briefly mention all opcodes defined in the system. Each logical "opcode" comes with modifiers, categorized into
+"exclusive" modifiers (where only one can be applied) and "flags" or "non-exclusive" modifiers (where multiple can be
+activated simultaneously). The number of permissible "flags" can vary depending on the specific "exclusive" modifier
+chosen.
All data from opcodes we write to StateDiffsAccumulator: + +```rust +pub struct StateDiffsAccumulator<F: SmallField> { + // dst0 candidates + pub dst_0_values: Vec<(bool, Boolean<F>, VMRegister<F>)>, + // dst1 candidates + pub dst_1_values: Vec<(Boolean<F>, VMRegister<F>)>, + // flags candidates + pub flags: Vec<(Boolean<F>, ArithmeticFlagsPort<F>)>, + // specific register updates + pub specific_registers_updates: [Vec<(Boolean<F>, VMRegister<F>)>; REGISTERS_COUNT], + // zero out specific registers + pub specific_registers_zeroing: [Vec<Boolean<F>>; REGISTERS_COUNT], + // remove ptr markers on specific registers + pub remove_ptr_on_specific_registers: [Vec<Boolean<F>>; REGISTERS_COUNT], + // pending exceptions, to be resolved next cycle. Should be masked by opcode applicability already + pub pending_exceptions: Vec<Boolean<F>>, + // ergs left, PC + // new ergs left if it's not one available after decoding + pub new_ergs_left_candidates: Vec<(Boolean<F>, UInt32<F>)>, + // new PC in case if it's not just PC+1 + pub new_pc_candidates: Vec<(Boolean<F>, UInt16<F>)>, + // other meta parameters of VM + pub new_tx_number: Option<(Boolean<F>, UInt32<F>)>, + pub new_ergs_per_pubdata: Option<(Boolean<F>, UInt32<F>)>, + // memory bounds + pub new_heap_bounds: Vec<(Boolean<F>, UInt32<F>)>, + pub new_aux_heap_bounds: Vec<(Boolean<F>, UInt32<F>)>, + // u128 special register, one from context, another from call/ret + pub context_u128_candidates: Vec<(Boolean<F>, [UInt32<F>; 4])>, + // internal machinery + pub callstacks: Vec<(Boolean<F>, Callstack<F>)>, + // memory page counter + pub memory_page_counters: Option<UInt32<F>>, + // decommittment queue + pub decommitment_queue_candidates: Option<( + Boolean<F>, + UInt32<F>, + [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH], + )>, + // memory queue + pub memory_queue_candidates: Vec<( + Boolean<F>, + UInt32<F>, + [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH], + )>, + // forward piece of log queue + pub log_queue_forward_candidates: Vec<(Boolean<F>, 
UInt32<F>, [Num<F>; QUEUE_STATE_WIDTH])>,
+    // rollback piece of log queue
+    pub log_queue_rollback_candidates: Vec<(Boolean<F>, UInt32<F>, [Num<F>; QUEUE_STATE_WIDTH])>,
+    // sponges to run. Should not include common sponges for src/dst operands
+    pub sponge_candidates_to_run: Vec<(
+        bool,
+        bool,
+        Boolean<F>,
+        ArrayVec<
+            (
+                Boolean<F>,
+                [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH],
+                [Num<F>; FULL_SPONGE_QUEUE_STATE_WIDTH],
+            ),
+            MAX_SPONGES_PER_CYCLE,
+        >,
+    )>,
+    // add/sub relations to enforce
+    pub add_sub_relations: Vec<(
+        Boolean<F>,
+        ArrayVec<AddSubRelation<F>, MAX_ADD_SUB_RELATIONS_PER_CYCLE>,
+    )>,
+    // mul/div relations to enforce
+    pub mul_div_relations: Vec<(
+        Boolean<F>,
+        ArrayVec<MulDivRelation<F>, MAX_MUL_DIV_RELATIONS_PER_CYCLE>,
+    )>,
+}
+```
+
+There will be no implementation details here because the code is commented step by step and is understandable. Short
+description:
+
+Apply opcodes; for DST0 it's possible to have opcode-constrained updates only into registers. Apply
+`StateDiffsAccumulator`, update the memory, update the registers, apply changes to the VM state, such as ergs left,
+etc., and push data to queues for other circuits. If an event has a rollback, then create the same event data but with
+the `rollback` flag, and enforce sponges. There are only 2 outcomes:
+
+- we have a dst0 write (and maybe a src0 read), which we have taken care of above
+- the opcode itself modified the memory queue; based on the outcome of the src0 read, in parallel opcodes either
+- do not use sponges and only rely on src0/dst0
+- cannot have src0/dst0 in memory, but use sponges (UMA, near_call, far call, ret)
+
+Outside of the cyclic part, the `VM` sets up different queues:
+
+1. 
Memory: + +```rust +let memory_queue_current_tail = QueueTailState { + tail: final_state.memory_queue_state, + length: final_state.memory_queue_length, + }; +let memory_queue_final_tail = QueueTailState::conditionally_select( + cs, + structured_input.completion_flag, + &memory_queue_current_tail, + &full_empty_state_large.tail, +); +``` + +1. Code decommit: + +```rust +let decommitment_queue_current_tail = QueueTailState { + tail: final_state.code_decommittment_queue_state, + length: final_state.code_decommittment_queue_length, + }; +let decommitment_queue_final_tail = QueueTailState::conditionally_select( + cs, + structured_input.completion_flag, + &decommitment_queue_current_tail, + &full_empty_state_large.tail, +); +``` + +1. Log: + +```rust +let final_log_state_tail = final_state.callstack.current_context.log_queue_forward_tail; + let final_log_state_length = final_state + .callstack + .current_context + .log_queue_forward_part_length; + +// but we CAN still check that it's potentially mergeable, basically to check that witness generation is good +for (a, b) in final_log_state_tail.iter().zip( + final_state + .callstack + .current_context + .saved_context + .reverted_queue_head + .iter(), +) { + Num::conditionally_enforce_equal(cs, structured_input.completion_flag, a, b); +} +let full_empty_state_small = QueueState::<F, QUEUE_STATE_WIDTH>::empty(cs); + +let log_queue_current_tail = QueueTailState { + tail: final_log_state_tail, + length: final_log_state_length, +}; +let log_queue_final_tail = QueueTailState::conditionally_select( + cs, + structured_input.completion_flag, + &log_queue_current_tail, + &full_empty_state_small.tail, +); +``` + +Wrap them: + +```rust +observable_output.log_queue_final_state.tail = log_queue_final_tail; +observable_output.memory_queue_final_state.tail = memory_queue_final_tail; +observable_output.decommitment_queue_final_state.tail = decommitment_queue_final_tail; + +structured_input.observable_output = observable_output; 
+structured_input.hidden_fsm_output = final_state; +``` + +Finally, we compute a commitment to PublicInput and allocate it as witness variables. + +```rust +let compact_form = + ClosedFormInputCompactForm::from_full_form(cs, &structured_input, round_function); + +let input_commitment: [_; INPUT_OUTPUT_COMMITMENT_LENGTH] = + commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/90.ram-permutation.md b/content/10.zk-stack/10.components/60.prover/40.circuits/90.ram-permutation.md new file mode 100644 index 00000000..d4eb427e --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/90.ram-permutation.md @@ -0,0 +1,203 @@ +--- +title: RAMPermutation +description: +--- + +## RAMPermutation PI + +### Input +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/input.rs#L27) + +```rust +pub struct RamPermutationInputData<F: SmallField> { + pub unsorted_queue_initial_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub sorted_queue_initial_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub non_deterministic_bootloader_memory_snapshot_length: UInt32<F>, +} +``` + +### Output + +```rust +() +``` + +### FSM Input and FSM Output +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/input.rs#L52) + +```rust +pub struct RamPermutationFSMInputOutput<F: SmallField> { + pub lhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub rhs_accumulator: [Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + pub current_unsorted_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub current_sorted_queue_state: QueueState<F, FULL_SPONGE_QUEUE_STATE_WIDTH>, + pub previous_sorting_key: [UInt32<F>; RAM_SORTING_KEY_LENGTH], + pub previous_full_key: [UInt32<F>; RAM_FULL_KEY_LENGTH], 
+    pub previous_value: UInt256<F>,
+    pub previous_is_ptr: Boolean<F>,
+    pub num_nondeterministic_writes: UInt32<F>,
+}
+```
+
+## Main circuit logic
+
+The circuit starts [here](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/mod.rs#L30).
+This function allocates PI inputs and calls the inner function, where all the main logic is implemented. In the end, it
+forms the FSM output and computes the PI commitment. The main purpose of this circuit is enforcing that memory queries
+are executed correctly.
+
+### First part
+[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/mod.rs#L43)
+
+We start, as usual, by allocating input fields from the PI.
+
+```rust
+let RamPermutationCircuitInstanceWitness {
+    closed_form_input,
+    unsorted_queue_witness,
+    sorted_queue_witness,
+} = closed_form_input_witness;
+
+let mut structured_input =
+    RamPermutationCycleInputOutput::alloc_ignoring_outputs(cs, closed_form_input.clone());
+
+let start_flag = structured_input.start_flag;
+let observable_input = structured_input.observable_input.clone();
+let hidden_fsm_input = structured_input.hidden_fsm_input.clone();
+```
+
+Some fields, like `unsorted_queue_initial_state` and `current_unsorted_queue_state`, represent the same value. So we
+should decide whether we take a new queue from `Input` or continue working with the current one from `FSM Input`. We
+use `start_flag` for this purpose.
+
+```rust
+let unsorted_queue_state = QueueState::conditionally_select(
+    cs,
+    start_flag,
+    &observable_input.unsorted_queue_initial_state,
+    &hidden_fsm_input.current_unsorted_queue_state,
+);
+
+let sorted_queue_state = QueueState::conditionally_select(
+    cs,
+    start_flag,
+    &observable_input.sorted_queue_initial_state,
+    &hidden_fsm_input.current_sorted_queue_state,
+);
+```
+
+Also, we generate challenges and accumulators for the permutation argument. 
The detailed explanation can be found +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). + +```rust +let fs_challenges = crate::utils::produce_fs_challenges( + cs, + observable_input.unsorted_queue_initial_state.tail, + observable_input.sorted_queue_initial_state.tail, + round_function, +); + +let mut lhs = <[Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS]>::conditionally_select( + cs, + start_flag, + &[num_one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &hidden_fsm_input.lhs_accumulator, +); +let mut rhs = <[Num<F>; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS]>::conditionally_select( + cs, + start_flag, + &[num_one; DEFAULT_NUM_PERMUTATION_ARGUMENT_REPETITIONS], + &hidden_fsm_input.rhs_accumulator, +); +``` + +### Main part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/mod.rs#L211) + +We call the inner function, where the main logic is implemented. + +Firstly, we check non-deterministic writes. These should be in the beginning of `sorted_queue`. We also count the number +of such writes. + +```rust +let is_nondeterministic_write = Boolean::multi_and( + cs, + &[ + can_pop, + ts_is_zero, + page_is_bootloader_heap, + is_write, + not_ptr, + ], +); + +*num_nondeterministic_writes = UInt32::conditionally_select( + cs, + is_nondeterministic_write, + &num_nondeterministic_writes_incremented, + &num_nondeterministic_writes, +); +``` + +For every new memory query from `sorted_queue` we enforce sorting by (`memory_page`, `index` and `timestamp`). 
+ +```rust +let sorting_key = [ + sorted_item.timestamp, + sorted_item.index, + sorted_item.memory_page, + ]; + +let (_keys_are_equal, previous_key_is_smaller) = + unpacked_long_comparison(cs, &sorting_key, previous_sorting_key); +``` + +Then, if the query is read one, we have two cases: + +- should enforce that the value is the same as in the previous value, if it has the same `memory_page` and `index` +- should enforce that the value is zero otherwise + +```rust +let value_equal = UInt256::equals(cs, &sorted_item.value, &previous_element_value); +let value_is_zero = UInt256::equals(cs, &sorted_item.value, &uint256_zero); +``` + +In the end, we compute permutation argument contributions to accumulators. The code is +[here](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/mod.rs#L363). The detailed +explanation can be found +[here](https://github.com/code-423n4/2023-10-zksync/blob/c3ff020df5d11fe91209bd99d7fb0ec1272dc387/docs/Circuits%20Section/Circuits/Sorting.md). + +### Final part +[GitHub](%%zk_git_repo_era-zkevm_circuits%%/blob/main/src/ram_permutation/mod.rs#L159) + +If the queues are empty now, that means that this instance should be the last one. + +```rust +let completed = unsorted_queue.length.is_zero(cs); +``` + +If so, we should check that permutation argument accumulators are equal and number of nondeterministic writes is +correct. + +```rust +for (lhs, rhs) in lhs.iter().zip(rhs.iter()) { + Num::conditionally_enforce_equal(cs, completed, lhs, rhs); +} + +let num_nondeterministic_writes_equal = UInt32::equals( + cs, + &num_nondeterministic_writes, + &observable_input.non_deterministic_bootloader_memory_snapshot_length, +); +num_nondeterministic_writes_equal.conditionally_enforce_true(cs, completed); +``` + +Finally, we form the output part of PI and compute a commitment to PI and allocate it as witness variables. 
+ +```rust +let input_commitment = commit_variable_length_encodable_item(cs, &compact_form, round_function); +for el in input_commitment.iter() { + let gate = PublicInputGate::new(el.get_variable()); + gate.add_to_cs(cs); +} +``` diff --git a/content/10.zk-stack/10.components/60.prover/40.circuits/_dir.yml b/content/10.zk-stack/10.components/60.prover/40.circuits/_dir.yml new file mode 100644 index 00000000..6ea460e7 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/40.circuits/_dir.yml @@ -0,0 +1 @@ +title: Circuits diff --git a/content/10.zk-stack/10.components/60.prover/50.boojum-gadgets.md b/content/10.zk-stack/10.components/60.prover/50.boojum-gadgets.md new file mode 100644 index 00000000..214d155f --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/50.boojum-gadgets.md @@ -0,0 +1,196 @@ +--- +title: Boojum Gadgets +description: +--- + +Gadgets in ZK programming are common arrangement of circuits abstracted for ease of use. +Boojum gadgets are low-level implementations of tools for constraint systems. +They consist of various types such as curves, +hash functions, lookup tables, and different circuit types. + +These gadgets are mostly a reference from +[franklin-crypto](%%zk_git_repo_franklin-crypto%%), with additional hash functions added. These gadgets +have been changed to use the Goldilocks field (order 2^64 - 2^32 + 1), which is much smaller than bn256. This allows us +to reduce the proof system. 
+
+## Circuits types
+
+We have the following types that we use for circuits:
+
+**Num (Number):**
+
+```rust
+pub struct Num<F: SmallField> {
+    pub(crate) variable: Variable,
+    pub(crate) _marker: std::marker::PhantomData<F>,
+}
+```
+
+**Boolean:**
+
+```rust
+pub struct Boolean<F: SmallField> {
+    pub(crate) variable: Variable,
+    pub(crate) _marker: std::marker::PhantomData<F>,
+}
+```
+
+**U8:**
+
+```rust
+pub struct UInt8<F: SmallField> {
+    pub(crate) variable: Variable,
+    pub(crate) _marker: std::marker::PhantomData<F>,
+}
+```
+
+**U16:**
+
+```rust
+pub struct UInt16<F: SmallField> {
+    pub(crate) variable: Variable,
+    pub(crate) _marker: std::marker::PhantomData<F>,
+}
+```
+
+**U32:**
+
+```rust
+pub struct UInt32<F: SmallField> {
+    pub(crate) variable: Variable,
+    pub(crate) _marker: std::marker::PhantomData<F>,
+}
+```
+
+**U160:**
+
+```rust
+pub struct UInt160<F: SmallField> {
+    pub inner: [UInt32<F>; 5],
+}
+```
+
+**U256:**
+
+```rust
+pub struct UInt256<F: SmallField> {
+    pub inner: [UInt32<F>; 8],
+}
+```
+
+**U512:**
+
+```rust
+pub struct UInt512<F: SmallField> {
+    pub inner: [UInt32<F>; 16],
+}
+```
+
+Every type consists of a Variable (the number inside Variable is just the index):
+
+```rust
+pub struct Variable(pub(crate) u64);
+```
+
+which is represented in the current Field. Variable is quite diverse. To have "good" alignment and size we manually
+do encoding management to be able to represent it as either a variable (that can be copied) or a witness.
+
+The implementations of these circuit types are similar. We can also divide them into two classes, main and dependent:
+dependent types like U8-U512 are decoded inside functions to Num<F> so they can be used in logical operations. As
+mentioned above, the property of these types is to perform logical operations and allocate witnesses.
+
+Let's demonstrate this in a Boolean example:
+
+```rust
+impl<F: SmallField> CSAllocatable<F> for Boolean<F> {
+    type Witness = bool;
+    fn placeholder_witness() -> Self::Witness {
+        false
+    }
+
+    #[inline(always)]
+    fn allocate_without_value<CS: ConstraintSystem<F>>(cs: &mut CS) -> Self {
+        let var = cs.alloc_variable_without_value();
+
+        Self::from_variable_checked(cs, var)
+    }
+
+    fn allocate<CS: ConstraintSystem<F>>(cs: &mut CS, witness: Self::Witness) -> Self {
+        let var = cs.alloc_single_variable_from_witness(F::from_u64_unchecked(witness as u64));
+
+        Self::from_variable_checked(cs, var)
+    }
+}
+```
+
+As you can see, you can allocate both with and without witnesses.
+
+## Hash function
+
+In gadgets we have a lot of hash implementations:
+
+- blake2s
+- keccak256
+- poseidon/poseidon2
+- sha256
+
+Each of them performs a different function in our proof system.
+
+## Queues
+
+One of the most important gadgets in our system is the queue. It helps us send data between circuits. Here is a quick
+explanation of how it works:
+
+```rust
+Struct CircuitQueue{
+    head: HashState,
+    tail: HashState,
+    length: UInt32,
+    witness: VecDeque<Witness>,
+}
+```
+
+The structure consists of `head` and `tail` commitments that are basically rolling hashes. Also, it has a `length` of
+the queue. These three fields are allocated inside the constraint system. Also, there is a `witness` that keeps the
+actual values that are currently stored in the queue.
+
+And here are the main functions:
+
+```rust
+fn push(&mut self, value: Element) {
+    // increment length
+    // head = hash(head, value)
+    // witness.push_back(value.witness)
+}
+
+fn pop(&mut self) -> Element {
+    // check length > 0
+    // decrement length
+    // value = witness.pop_front()
+    // tail = hash(tail, value)
+    // return value
+}
+
+fn final_check(&self) -> Element {
+    // check that length == 0
+    // check that head == tail
+}
+```
+
+So the key point of how the queue proves that popped elements are the same as the pushed ones is the equality of the
+rolling hashes stored in the fields `head` and `tail`.
+
+Also, we check that we can’t pop an element before it was pushed. This is done by checking that `length >= 0`.
+
+It is very important to perform the `final_check`, which basically checks the equality of the two hashes. So if the
+queue is never emptied, and we haven’t checked the equality of `head` and `tail` at the end, we also haven’t proven
+that the elements we popped are correct.
+
+For now, we use the poseidon2 hash. Here are the links to the queue implementations:
+
+- [CircuitQueue](%%zk_git_repo_era-boojum%%/blob/main/src/gadgets/queue/mod.rs#L29)
+- [FullStateCircuitQueue](%%zk_git_repo_era-boojum%%/blob/main/src/gadgets/queue/full_state_queue.rs#L20C12-L20C33)
+
+The difference is that we actually compute and store a hash inside CircuitQueue during `push` and `pop` operations. But
+in FullStateCircuitQueue our `head` and `tail` are just states of sponges. So instead of computing a full hash, we just
+absorb a pushed (popped) element.
diff --git a/content/10.zk-stack/10.components/60.prover/60.boojum-function-check-if-satisfied.md b/content/10.zk-stack/10.components/60.prover/60.boojum-function-check-if-satisfied.md new file mode 100644 index 00000000..857fa954 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/60.boojum-function-check-if-satisfied.md @@ -0,0 +1,100 @@ +--- +title: Boojum Function - `check_if_satisfied` +description: +--- + +_Note: Please read our other documentation and tests first before reading this page._ + +Our circuits (and tests) depend on a function from Boojum called +[`check_if_satisfied`](%%zk_git_repo_era-boojum%%/blob/main/src/cs/implementations/satisfiability_test.rs#L11). +You don’t need to understand it to run circuit tests, but it can be informative to learn more about Boojum and our proof +system. + +First we prepare the constants, variables, and witness. As a reminder, the constants are just constant numbers, the +variables circuit columns that are under PLONK copy-permutation constraints (so they are close in semantics to variables +in programming languages), and the witness ephemeral values that can be used to prove certain constraints, for example +by providing an inverse if the variable must be non-zero. + +![Check_if_satisfied.png](/images/zk-stack/Check_if_satisfied.png) + +Next we prepare a view. Instead of working with all of the columns at once, it can be helpful to work with only a +subset. + +![Check_if_satisfied(1).png](/images/zk-stack/Check_if_satisfied-1.png) + +Next we create the paths_mappings. For each gate in the circuit, we create a vector of booleans in the correct shape. +Later, when we traverse the gates with actual inputs, we’ll be able to remember which gates should be satisfied at +particular rows by computing the corresponding selector using constant columns and the paths_mappings. + +![Check_if_satisfied(2).png](/images/zk-stack/Check_if_satisfied-2.png) + +Now, we have to actually check everything. 
The checks for the rows depend on whether they are under general purpose +columns, or under special purpose columns. + +**General purpose rows:** + +For each row and gate, we need several things. + +- Evaluator for the gate, to compute the result of the gate +- Path for the gate from the paths_mappings, to locate the gate +- Constants_placement_offset, to find the constants +- Num_terms in the evaluator + - If this is zero, we can skip the row since there is nothing to do +- Gate_debug_name +- num_constants_used +- this_view +- placement (described below) +- evaluation function + +![Check_if_satisfied(3).png](/images/zk-stack/Check_if_satisfied-3.png) + +Placement is either UniqueOnRow or MultipleOnRow. UniqueOnRow means there is only one gate on the row (typically because +the gate is larger / more complicated). MultipleOnRow means there are multiple gates within the same row (typically +because the gate is smaller). For example, if a gate only needs 30 columns, but we have 150 columns, we could include +five copies for that gate in the same row. + +Next, if the placement is UniqueOnRow, we call evaluate_over_general_purpose_columns. All of the evaluations should be +equal to zero, or we panic. + +![Check_if_satisfied(4).png](/images/zk-stack/Check_if_satisfied-4.png) + +If the placement is MultipleOnRow, we again call evaluate_over_general_purpose_columns. If any of the evaluations are +non-zero, we log some extra debug information, and then panic. + +![Check_if_satisfied(7).png](/images/zk-stack/Check_if_satisfied-7.png) + +This concludes evaluating and checking the generalized rows. Now we will check the specialized rows. + +![Check_if_satisfied(8).png](/images/zk-stack/Check_if_satisfied-8.png) + +We start by initializing vectors for specialized_placement_data, evaluation_functions, views, and evaluator_names. Then, +we iterate over each gate_type_id and evaluator. 
+ +![Check_if_satisfied(9).png](/images/zk-stack/Check_if_satisfied-9.png) + +If gate_type_id is a LookupFormalGate, we don’t need to do anything in this loop because it is handled by the lookup +table. For all other cases, we need to check the evaluator’s total_quotient_terms_over_all_repetitions is non-zero. + +![Check_if_satisfied(11).png](/images/zk-stack/Check_if_satisfied-11.png) + +Next, we get num_terms, num_repetitions, and share_constants, total_terms, initial_offset, per_repetition_offset, and +total_constants_available. All of these together form our placement data. + +![Check_if_satisfied(12).png](/images/zk-stack/Check_if_satisfied-12.png) + +![Check_if_satisfied(13).png](/images/zk-stack/Check_if_satisfied-13.png) + +Once we know the placement_data, we can keep it for later, as well as the evaluator for this gate. + +![Check_if_satisfied(14).png](/images/zk-stack/Check_if_satisfied-14.png) + +We also will keep the view and evaluator name. This is all the data we need from our specialized columns. + +To complete the satisfaction test on the special columns, we just need to loop through and check that each of the +evaluations are zero. + +![Check_if_satisfied(16).png](/images/zk-stack/Check_if_satisfied-16.png) + +![Check_if_satisfied(17).png](/images/zk-stack/Check_if_satisfied-17.png) + +Now we have checked every value on every row, so the satisfaction test is passed, and we can return true. 
diff --git a/content/10.zk-stack/10.components/60.prover/_dir.yml b/content/10.zk-stack/10.components/60.prover/_dir.yml new file mode 100644 index 00000000..771aa598 --- /dev/null +++ b/content/10.zk-stack/10.components/60.prover/_dir.yml @@ -0,0 +1 @@ +title: Prover diff --git a/content/10.zk-stack/10.components/70.compiler/10.toolchain/10.index.md b/content/10.zk-stack/10.components/70.compiler/10.toolchain/10.index.md new file mode 100644 index 00000000..6dda6359 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/10.toolchain/10.index.md @@ -0,0 +1,85 @@ +--- +title: Compiler Toolchain Overview +description: +--- + +This section introduces the zkEVM LLVM-based compiler toolchain for smart contract languages with Ethereum Virtual Machine (EVM) support. +The toolchain works on top of existing compilers and requires their output, which typically includes intermediate representations (IRs), +abstract syntax trees (ASTs), and auxiliary contract metadata and documentation. + +::callout{icon="i-heroicons-information-circle" color="blue"} +At the time of writing, we support Solidity and Vyper. +:: + +The toolchain consists of the following: + +1. [High-level source code compilers](#high-level-source-code-compilers): `solc` and `vyper`. +2. [IR compilers, front ends to LLVM](#ir-compilers): `zksolc` and `zkvyper`. +3. [The LLVM framework](/zk-stack/components/compiler/toolchain/llvm) with a zkEVM back end which emits zkEVM text assembly. +4. [The assembler](#assembler) which produces the zkEVM bytecode from text assembly. +5. [Hardhat plugins](#hardhat-plugins) which set up the environment. + +![Compiler Toolchain Visualization](/images/zk-stack/compiler-toolchain.png "Compiler Toolchain") + +## High-level Source Code Compilers + +High-level source code is processed by third-party compilers. These compilers do the following: + +1. Process and validate the high-level source code. +2. Translate the source code into IR and metadata. +3. 
Pass the IR and metadata to our IR compilers via the standard I/O streams. + +We are using two high-level source code compilers at the time of writing: + +- [solc](https://github.com/ethereum/solc-bin): the official Solidity compiler. For more info, see the latest [Solidity documentation](https://docs.soliditylang.org/en/latest/). +- [vyper](https://github.com/vyperlang/vyper/releases): the official Vyper compiler. For more info, see the latest [Vyper documentation](https://docs.vyperlang.org/en/latest/index.html). + +::callout{icon="i-heroicons-information-circle" color="blue"} +**Security and best practices:** +<br /> +Follow the [security considerations and best practices](/build/developer-reference/best-practices#security-and-best-practices) +to build smart contracts on zkSync Era. +:: + +## IR Compilers + +Our toolchain includes LLVM front ends, written in Rust, that process the output of high-level source code compilers: + +- [zksolc](%%zk_git_repo_zksolc-bin%%) which calls `solc` as a child process. For more info, see the latest [zksolc documentation](/zk-stack/components/compiler/toolchain/solidity). +- [zkvyper](%%zk_git_repo_zkvyper-bin%%): which calls `vyper` as a child process. For more info, see the latest [zkvyper documentation](/zk-stack/components/compiler/toolchain/vyper). + +These IR compilers perform the following steps: + +1. Receive the input, which is usually standard or combined JSON passed by the Hardhat plugin via standard input. +2. Save the relevant data, modify the input with zkEVM settings, and pass it to the underlying high-level source code compiler +which is called as a child process. +3. Receive the IR and metadata from the underlying compiler. +4. Translate the IR into LLVM IR, resolving dependencies with the help of metadata. +5. Optimize the LLVM IR with the powerful LLVM framework optimizer and emit zkEVM text assembly. +6. Print the output matching the format of the input method the IR compiler is called with. 
+ +Our IR compilers leverage I/O mechanisms which already exist in the high-level source code +compilers. They may modify the input and output to some extent, add data for features unique to zkEVM, +and remove unsupported feature artifacts. + +## Assembler + +The [assembler](%%zk_git_repo_era-zkEVM-assembly%%), which is written in Rust, compiles zkEVM assembly +to zkEVM bytecode. This tool is not a part of our LLVM back end as it uses several cryptographic libraries which are +easier to maintain outside of the framework. + +## Hardhat Plugins + +We recommend using our IR compilers via [their corresponding Hardhat plugins](/build/tooling/hardhat/getting-started). +Add these plugins to the Hardhat's config file to compile new projects or migrate +existing ones to zkSync Era. For a lower-level approach, download our compiler binaries via the +links above and use their CLI interfaces. + +### Installing and configuring plugins + +Add the plugins below to the Hardhat's config file to compile new projects or migrate +existing ones to zkSync Era. For a lower-level approach, download our compiler binaries +[links above](#ir-compilers) and use their CLI interfaces. + +- [hardhat-zksync-solc documentation](/build/tooling/hardhat/hardhat-zksync-solc) +- [hardhat-zksync-vyper documentation](/build/tooling/hardhat/hardhat-zksync-vyper) diff --git a/content/10.zk-stack/10.components/70.compiler/10.toolchain/20.solidity.md b/content/10.zk-stack/10.components/70.compiler/10.toolchain/20.solidity.md new file mode 100644 index 00000000..0ef59ced --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/10.toolchain/20.solidity.md @@ -0,0 +1,121 @@ +--- +title: Solidity Compiler +description: +--- + +The compiler we provide as a part of our toolchain is called [zksolc](%%zk_git_repo_zksolc-bin%%). 
It +operates on IR and metadata received from the underlying [solc](https://docs.soliditylang.org/en/latest/) compiler, +which must be available in `$PATH`, or its path must be explicitly passed via the CLI (command-line interface). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +To safeguard the security and efficiency of your application, always use the latest compiler version. +:: + +## Usage + +Make sure your machine satisfies the [system requirements](%%zk_git_repo_era-compiler-solidity%%/tree/main#system-requirements). + +Using our compiler via the Hardhat plugin usually suffices. However, knowledge of its interface and I/O (input/output) +methods are crucial for integration, debugging, or contribution purposes. + +The CLI supports several I/O modes: + +1. Standard JSON. +2. Combined JSON. +3. Free-form output. + +All three modes use the standard JSON `solc` interface internally. This reduces the complexity of the `zksolc` +interface and facilitates testing. + +### Standard JSON + +The `zksolc` standard JSON I/O workflow closely follows that of the official `solc` compiler. However, `zksolc` does not +support some configuration settings which are only relevant to the EVM architecture. + +Additional zkEVM data is supported by `zksolc` but is omitted when passed to `solc`: + +- `settings/optimizer/mode`: sets the optimization mode. Available values: `0`, `1`, `2`, `3`, `s`, `z`. The default + setting is `3`. See [LLVM optimizer](llvm#optimizer). +- `settings/optimizer/fallback_to_optimizing_for_size`: tries to compile again in `z` mode if the bytecode is too large for zkEVM. +- `settings/optimizer/disable_system_request_memoization`: disables the memoization of data received in requests to System Contracts. 
+ +Unsupported sections of the input JSON, ignored by `zksolc`: + +- `sources/<file>/urls` +- `sources/destructible` +- `settings/stopAfter` +- `settings/evmVersion` +- `settings/debug` +- `settings/metadata`: for zkEVM you can only append `keccak256` metadata hash to the bytecode. +- `settings/modelChecker` + +Additional zkEVM data inserted by `zksolc`: + +- `long_version`: the full `solc` version output. +- `zk_version`: the `zksolc` version. +- `contract/hash`: the hash of the zkEVM bytecode. +- `contract/factory_dependencies`: bytecode hashes of contracts created in the current contract with `CREATE`. + +[More details here](/build/developer-reference/ethereum-differences/contract-deployment#note-on-factory-deps). + +Unsupported sections of the output JSON, ignored by `zksolc`: + +- `contracts/<file>/<contract>/evm/bytecode`: replaced with a JSON object with zkEVM build data. +- `contracts/<file>/<contract>/ewasm` + +See the complete standard JSON data structures in [the zksolc repository](%%zk_git_repo_era-compiler-solidity%%/tree/main/src/solc/standard_json). + +### Combined JSON + +The `zksolc` standard JSON I/O workflow closely follows that of the official `solc` compiler. However, `zksolc` does not +support some configuration settings which are only relevant to the EVM architecture. + +Combined JSON is only an output format; there is no combined JSON input format. Instead, CLI arguments are +used for configuration. + +Additional zkEVM data, inserted by `zksolc`: + +- `zk_version`: the version of `zksolc`. +- `contract/factory_deps`: bytecode hashes of contracts created by the current contract with `CREATE`. + +[More details here](/build/developer-reference/ethereum-differences/contract-deployment#note-on-factory-deps). 
+ +Unsupported combined JSON flags, rejected by `zksolc`: + +- `function-debug` +- `function-debug-runtime` +- `generated-sources` +- `generated-sources-runtime` +- `opcodes` +- `srcmap` +- `srcmap-runtime` + +For more information, see the complete combined JSON data structures in [the zksolc repository](%%zk_git_repo_era-compiler-solidity%%/tree/main/src/solc/combined_json). + +### Free-form output + +This output format is utilized in Yul and LLVM IR compilation modes. These modes currently only support compiling a single +file. Only `--asm` and `--bin` output flags are supported, so this mode can be useful for debugging and prototyping. + +## Limitations + +Currently, Solidity versions as old as `0.4.12` are supported, although **we strongly recommend using** the latest +supported revision of `0.8`, as older versions contain known bugs and have limitations dictated by the absence of IR with +sufficient level of abstraction over EVM. + +Projects written in Solidity `>=0.8` are compiled by default through the Yul pipeline, whereas those written in `<=0.7` are compiled +via EVM legacy assembly which is a less friendly IR due to its obfuscation of control-flow and call graphs. +Due to this obfuscation, there are several limitations in zkSync for contracts written in Solidity `<=0.7`: + +1. Recursion on the stack is not supported. +2. Internal function pointers are not supported. +3. Contract size and performance may be affected. + +## Using libraries + +The usage of libraries in Solidity is supported in zkSync Era with the following considerations: + +- If a Solidity library can be inlined (i.e. it only contains `private` or `internal` methods), it can be used without + any additional configuration. 
+- However, if a library contains at least one `public` or `external` method, it cannot be inlined and its address needs + to be passed explicitly to the compiler; see [compiling non-inlinable libraries](/build/tooling/hardhat/compiling-libraries#compiling-non-inlinable-libraries). diff --git a/content/10.zk-stack/10.components/70.compiler/10.toolchain/30.vyper.md b/content/10.zk-stack/10.components/70.compiler/10.toolchain/30.vyper.md new file mode 100644 index 00000000..7072f64e --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/10.toolchain/30.vyper.md @@ -0,0 +1,49 @@ +--- +title: Vyper Compiler +description: +--- + +The Vyper compiler we provide as part of our toolchain is called [zkvyper](%%zk_git_repo_zkvyper-bin%%). It +operates on Vyper’s LLL IR, and metadata received from the underlying [vyper](https://docs.vyperlang.org/en/latest/index.html) compiler, +which must be available in `$PATH`, or its path must be explicitly passed via the CLI (command-line interface). + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +To safeguard the security and efficiency of your application, always use the latest compiler version. +:: + +## Usage + +Make sure your machine satisfies the [system requirements](%%zk_git_repo_era-compiler-vyper%%/tree/main#system-requirements). + +Using our compiler via the Hardhat plugin usually suffices. However, knowledge of its interface and I/O (input/output) +methods are crucial for integration, debugging, or contribution purposes. + +#### Combined JSON + +The `zkvyper` standard JSON I/O workflow closely follows that of the official `vyper` compiler. However, `zkvyper` does not +support some configuration settings which are only relevant to the EVM architecture. + +Combined JSON is only an output format; there is no combined JSON input format. Instead, CLI arguments are +used for configuration. 
+
+Additional zkEVM data is inserted into the output combined JSON by `zkvyper`:
+
+- `zk_version`: the `zkvyper` version.
+- `contract/factory_deps`: bytecode hashes of contracts created in the current contract with `CREATE`.
+  Since Vyper does not support `CREATE` directly, only the forwarder can be present in this mapping.
+
+  [More details here](/build/developer-reference/ethereum-differences/contract-deployment#note-on-factory-deps).
+
+Regardless of the requested output, only the `combined_json`, `abi`, `method_identifiers`, `bytecode`, `bytecode_runtime`
+flags are supported, while the rest are ignored.
+
+Other output formats are available via the `-f` option. Check out `vyper --help` for more details.
+
+## Limitations
+
+Versions from 0.3.4 to 0.3.8 are not supported. The only supported versions are 0.3.3, 0.3.9, 0.3.10.
+
+Also, since there is no separation of deploy and runtime code on EraVM, the following Vyper built-ins are not supported:
+
+- `create_copy_of`
+- `create_from_blueprint`
diff --git a/content/10.zk-stack/10.components/70.compiler/10.toolchain/40.llvm.md b/content/10.zk-stack/10.components/70.compiler/10.toolchain/40.llvm.md
new file mode 100644
index 00000000..51d54263
--- /dev/null
+++ b/content/10.zk-stack/10.components/70.compiler/10.toolchain/40.llvm.md
@@ -0,0 +1,31 @@
+---
+title: LLVM Framework
+description:
+---
+
+::callout{icon="i-heroicons-exclamation-triangle" color="amber"}
+To safeguard the security and efficiency of your application, always use the latest compiler version.
+::
+
+[The LLVM framework](%%zk_git_repo_era-compiler-llvm%%) is a soundly-architected and well-tested framework
+for developing toolchains for smart contract languages. Its powerful intermediate representation (IR) allows developers
+to design, implement, and optimize efficient language-specific features while benefiting from the extensive LLVM ecosystem. 
+This ensures high-performance execution, improved portability, and seamless integration with existing LLVM-based tools. +Furthermore, the modularity and extensibility of LLVM make it easier to support new smart contract languages with LLVM front ends. + +Additionally, LLVM improves on the original EVM pipeline efficiency, as we can take advantage of the +numerous optimization passes, tools, and tests available in its mature ecosystem. + +In our toolchain, LLVM consumes the LLVM IR, applies extensive optimizations, and eventually passes the optimized IR +to the zkEVM back-end code generator in order to produce the zkEVM text assembly output. + +## Optimizer + +All our compilers utilize the state-of-the-art LLVM optimizer. +By default, they optimize for performance, which correlates with the number of VM cycles per transaction, +thus affecting gas usage. +The `z` option may reduce the contract size for large contracts, making deployments cheaper while increasing the average transaction price. + +## Diving deeper + +For more information on the LLVM framework, [see the official documentation](https://llvm.org/). diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/10.index.md b/content/10.zk-stack/10.components/70.compiler/20.specification/10.index.md new file mode 100644 index 00000000..04ca1384 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/10.index.md @@ -0,0 +1,40 @@ +--- +title: Overview +description: +--- + +This is a technical deep-dive into the specifics of how the compiler works. +If you're looking to just deploy a contract, +please visit [Toolchain](/zk-stack/components/compiler/toolchain) to understand the specifics around our Solidity, Vyper and LLVM compilers. 
+
+## Glossary
+
+| Entity | Description |
+| ------ | ----------- |
+| zksolc | The Solidity compiler, developed by Matter Labs. |
+| solc | The original Solidity compiler, developed by the Ethereum community. Called by zksolc as a subprocess to get the IRs of the source code of the project. |
+| LLVM | The compiler framework, used for optimizations and assembly generation. |
+| EraVM assembler/linker | The tool written in Rust. Translates the assembly emitted by LLVM to the target bytecode. |
+| Virtual machine | The zkSync Era virtual machine called EraVM with a custom instruction set. |
+| [EraVM specification](%%zk_git_repo_eravm-spec%%/spec.html) | A combination of human-readable documentation and a formal description of EraVM, including its structure and operation, instruction syntax, semantics, and encoding. |
+| Intermediate representation (IR) | The data structure or code used internally by the compiler to represent source code. |
+| Yul | One of the Solidity IRs. Is a superset of the assembly available in Solidity. Used by default for contracts written in Solidity ≥0.8. |
+| EVMLA | One of the Solidity IRs called EVM legacy assembly. Is a predecessor of Yul, but much closer to the pure EVM bytecode. Used by default for contracts written in Solidity <0.8. |
+| LLVM IR | The IR native to the LLVM framework. |
+| EraVM assembly | The text representation of the EraVM bytecode. Emitted by the LLVM framework. Translated into the EraVM bytecode by the EraVM assembler/linker. |
+| EraVM bytecode | The smart contract bytecode, executed by EraVM. |
+| Stack | The segment of the non-persistent contract memory. Consists of two parts: global data and function stack frame. |
+| Heap | The segment of the non-persistent contract memory. All the data is globally accessible by both the compiler and user code. The allocation is handled by the solc’s Yul/EVMLA allocator only. |
+| Auxiliary heap | The segment of the non-persistent contract memory, introduced to avoid conflicts with the solc’s allocator. All the data is globally accessible by the compiler only. The allocation is handled by the zksolc’s compiler only. All contract calls specific to zkSync, including the system contracts, are made via the auxiliary heap. It is also used to return data (e.g. the array of immutables) from the constructor. |
+| Calldata | The segment of the non-persistent contract memory. The heap or auxiliary heap of the parent/caller contract. |
+| Return data | The segment of the non-persistent contract memory. The heap or auxiliary heap of the child/callee contract. |
+| Contract storage | The persistent contract memory. No relevant differences from that of EVM. |
+| System contracts | The special set of zkSync kernel contracts written in Solidity by Matter Labs. |
+| Contract context | The special storage of VM that keeps data like the current address, the caller’s address, etc. 
| + +## Concepts + +- [Code Separation](/zk-stack/components/compiler/specification/code-separation) +- [System Contracts](/zk-stack/components/compiler/specification/system-contracts) +- [Exception Handling](/zk-stack/components/compiler/specification/exception-handling) +- [EVMLA translator](/zk-stack/components/compiler/specification/evmla-translator) diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/20.code-separation.md b/content/10.zk-stack/10.components/70.compiler/20.specification/20.code-separation.md new file mode 100644 index 00000000..760ad3c9 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/20.code-separation.md @@ -0,0 +1,79 @@ +--- +title: Code Separation +description: +--- + +On both EVM and EraVM the code is separated into two parts: deploy code and runtime code. The deploy code is executed +only once, when the contract is deployed. The runtime code is executed every time the contract is called. However, on +EraVM the deploy code and runtime code are deployed together, and they are not split into two separate chunks of +bytecode. + +The constructor is added to the contract as a regular public function which is called by System Contracts during +deployment. + +Just like on EVM, the deploy code on EraVM is represented by a single constructor, named differently in different +languages: + +| Language | Name | +| -------- | ----------------- | +| Solidity | `constructor` | +| Yul | `object "<name>"` | +| Vyper | `__init__` | + +The constructor is merged with the runtime code by the LLVM IR generator of our compiler, and a minimal contract on +EraVM looks like on the examples below. + +### LLVM IR + +In the example below, contract `@__entry` arguments `%0`-`%11` correspond to registers `r1`-`r12` on EraVM. 
+ +```txt +; Function Attrs: nofree noreturn null_pointer_is_valid +define i256 @__entry(ptr addrspace(3) nocapture readnone %0, i256 %1, i256 %2, i256 %3, i256 %4, i256 %5, i256 %6, i256 %7, i256 %8, i256 %9, i256 %10, i256 %11) local_unnamed_addr #1 personality ptr @__personality { +entry: + %is_deploy_code_call_flag_truncated = and i256 %1, 1 ; check if the call is a deploy code call + %is_deploy_code_call_flag.not = icmp eq i256 %is_deploy_code_call_flag_truncated, 0 ; invert the flag + br i1 %is_deploy_code_call_flag.not, label %runtime_code_call_block, label %deploy_code_call_block ; branch to the deploy code block if the flag is set + +deploy_code_call_block: ; preds = %entry + store i256 32, ptr addrspace(2) inttoptr (i256 256 to ptr addrspace(2)), align 256 ; store the offset of the array of immutables + store i256 0, ptr addrspace(2) inttoptr (i256 288 to ptr addrspace(2)), align 32 ; store the length of the array of immutables + tail call void @llvm.syncvm.return(i256 53919893334301279589334030174039261352344891250716429051063678533632) ; return the array of immutables using EraVM return ABI data encoding + unreachable + +runtime_code_call_block: ; preds = %entry + store i256 42, ptr addrspace(1) null, align 4294967296 ; store a value to return + tail call void @llvm.syncvm.return(i256 2535301200456458802993406410752) ; return the value using EraVM return ABI data encoding + unreachable +} +``` + +### EraVM Assembly + +```asm + .text + .file "default.yul" + .globl __entry +__entry: +.func_begin0: + and! 
1, r2, r1 ; check if the call is a deploy code call + jump.ne @.BB0_1 ; branch to the deploy code block if the flag is set + add 42, r0, r1 ; move the value to return into r1 + st.1 0, r1 ; store the value to return + add @CPI0_1[0], r0, r1 ; move the return ABI data into r1 + ret.ok.to_label r1, @DEFAULT_FAR_RETURN ; return the value +.BB0_1: + add 32, r0, r1 ; move the offset of the array of immutables into r1 + st.2 256, r1 ; store the offset of the array of immutables + st.2 288, r0 ; store the length of the array of immutables + add @CPI0_0[0], r0, r1 ; move the return ABI data into r1 + ret.ok.to_label r1, @DEFAULT_FAR_RETURN ; return the array of immutables +.func_end0: + + .note.GNU-stack + .rodata +CPI0_0: + .cell 53919893334301279589334030174039261352344891250716429051063678533632 +CPI0_1: + .cell 2535301200456458802993406410752 +``` diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/30.system-contracts.md b/content/10.zk-stack/10.components/70.compiler/20.specification/30.system-contracts.md new file mode 100644 index 00000000..7faee90a --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/30.system-contracts.md @@ -0,0 +1,140 @@ +--- +title: System Contracts +description: +--- + +Many EVM instructions require special handling by the System Contracts. Among them are: `ORIGIN`, +`CALLVALUE`, `BALANCE`, `CREATE`, `SHA3`, and others. To see the full detailed list of instructions that require special +handling, see +[the EVM instructions reference](/zk-stack/components/compiler/specification/instructions/evm). + +There are several types of System Contracts from the perspective of how they are handled by the zkSync Era compilers: + +1. [Environmental data storage](#environmental-data-storage). +2. [KECCAK256 hash function](#keccak256-hash-function). +3. [Contract deployer](#contract-deployer). +4. [Ether value simulator](#ether-value-simulator). +5. [Simulator of immutables](#simulator-of-immutables). +6. 
[Event handler](#event-handler). + +### Environmental Data Storage + +Such storage contracts are accessed with static calls in order to retrieve values for the block, transaction, and other +environmental entities: `CHAINID`, `DIFFICULTY`, `BLOCKHASH`, etc. + +One good example of such contract is +[SystemContext](%%zk_git_repo_era-contracts%%/blob/main/system-contracts/contracts/SystemContext.sol) that provides +the majority of the environmental data. + +Since EVM is not using external calls for these instructions, we must use [the auxiliary heap](#auxiliary-heap) for +their calldata. + +Steps to handle such instructions: + +1. Store the calldata for the System Contract call on the auxiliary heap. +2. Call the System Contract with a static call. +3. Check the return status code of the call. +4. [Revert or throw](/zk-stack/components/compiler/specification/exception-handling) + if the status code is zero. +5. Read the ABI data and extract the result. All such System Contracts return a single 256-bit value. +6. Return the value as the result of the original instruction. + +For reference, see +[the LLVM IR codegen source code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/context/function/llvm_runtime.rs#L488). + +### KECCAK256 Hash Function + +Handling of this function is similar to [Environmental Data Storage](#environmental-data-storage) with one difference: + +Since EVM also uses heap to store the calldata for `KECCAK256`, the required memory chunk is allocated by the IR +generator, and zkSync Era compiler does not need to use [the auxiliary heap](#auxiliary-heap). + +For reference, see +[the LLVM IR codegen source code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/context/function/llvm_runtime.rs). 
+ +### Contract Deployer + +See [handling CREATE](/build/developer-reference/ethereum-differences/evm-instructions#create-create2) +and +[dependency code substitution instructions](/build/developer-reference/ethereum-differences/evm-instructions#datasize-dataoffset-datacopy) +on zkSync Era documentation. + +For reference, see LLVM IR codegen for +[the deployer call](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/context/function/runtime/deployer_call.rs) +and +[CREATE-related instructions](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/create.rs). + +### Ether Value Simulator + +EraVM does not support passing Ether natively, so this is handled by a special System Contract called +[MsgValueSimulator](%%zk_git_repo_era-contracts%%/blob/main/system-contracts/contracts/MsgValueSimulator.sol). + +An external call is redirected through the simulator if the following conditions are met: + +1. The [call](/zk-stack/components/compiler/specification/instructions/evm/calls) has the Ether value parameter. +2. The Ether value is non-zero. + +Calls to the simulator require extra data passed via ABI using registers: + +1. Ether value. +2. The address of the contract to call. +3. The [system call bit](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#to_system), +which is only set if a call to the [ContractDeployer](#contract-deployer) is being redirected, that is `CREATE` or `CREATE2` is called with non-zero Ether. + +Passing Ether value in EraVM is implemented by using a combination of: + +- a special 128-bit register [`context_u128`](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#gs_context_u128) +which is a part of the EraVM [transient state](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#StateDefinitions); +- an [immutable value of `context_u128`](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#ecf_context_u128_value) +captured in the stack frame in a moment of a call. 
+ +The process of setting up a value and capturing it is described in details in the section [Context Register of the EraVM specification](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#StateDefinitions). + +For reference, see [the LLVM IR codegen source code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/call.rs#L530). + +### Simulator of Immutables + +See [handling immutables](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable) +on zkSync Era documentation. + +For reference, see LLVM IR codegen for +[instructions for immutables](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/immutable.rs) +and +[RETURN from the deploy code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/return.rs#L28). + +### Event Handler + +Event payloads are sent to a special System Contract called +[EventWriter](%%zk_git_repo_era-contracts%%/blob/main/system-contracts/contracts/EventWriter.yul). +Like on EVM, the payload consists of topics and data: + +1. The topics with a length-prefix are passed via ABI using registers. +2. The data is passed via the default heap, like on EVM. + +For reference, see +[the LLVM IR codegen source code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/event.rs). + +## Auxiliary Heap + +Both [zksolc](/zk-stack/components/compiler/toolchain/solidity) and [zkvyper](/zk-stack/components/compiler/toolchain/vyper) +compilers for EraVM operate on [the IR level](/zk-stack/components/compiler/toolchain#ir-compilers), +so they cannot control the heap memory allocator which remains a responsibility of +[the high-level source code compilers](/zk-stack/components/compiler/toolchain#high-level-source-code-compilers) emitting the IRs. + +However, there are several cases where EraVM needs to allocate memory on the heap and EVM does not. 
The auxiliary heap is
+used for these cases:
+
+1. [Returning immutables](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable)
+   from the constructor.
+
+2. Allocating calldata and return data for calling the System Contracts.
+
+While the ordinary heap contains calldata and return data for calls to **user contracts**, the auxiliary heap contains calldata
+and return data for calls to **System Contracts**. This ensures better compatibility with EVM as users should be able to call
+EraVM-specific System Contracts in a transparent way, without System Contracts affecting calldata or return data.
+This prevents situations where calling System Contracts interferes with the heap layout expected by the contract developer.
+
+For more details on the heaps, refer to the EraVM specification,
+which describes [types of heaps](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#data_page_params),
+their connections to the [stack frames and memory growth](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#ctx_heap_page_id),
+and their role in [communication between contracts](%%zk_git_repo_matter-labs-github-io%%/eravm-spec/spec.html#MemoryForwarding).
diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/40.exception-handling.md b/content/10.zk-stack/10.components/70.compiler/20.specification/40.exception-handling.md
new file mode 100644
index 00000000..0325271c
--- /dev/null
+++ b/content/10.zk-stack/10.components/70.compiler/20.specification/40.exception-handling.md
@@ -0,0 +1,132 @@
+---
+title: Exception Handling
+description:
+---
+
+This document explains some peculiarities of the exception handling (EH) in zkEVM architecture.
+
+In a nutshell, there are two EH mechanisms in zkEVM: contract-level and function-level.
+The former was inherited from the EVM architecture, and the latter is more common to general-purpose languages. 
+ +| | Contract Level | Function Level | +| ------------ | --------------- | ----------------------------------------------------------------------------------------------------- | +| Yul Example | revert(0, 0) | verbatim("throw") | +| Native to | EVM | General-purpose languages | +| Handled by | zkEVM | Compiler | +| Catchable by | Caller contract | Caller function | +| Efficient | Yes | Huge size impact due to numerous catch blocks. Extra cycles are needed for propagating the exception. | + +## Contract Level + +This type of exceptions is inherited from the EVM architecture. On EVM, such instructions as `REVERT` and `INVALID`, +immediately terminate the contract execution and return the control to the callee. It is impossible to catch them +within the contract, and it can be only done on the callee side with checking the call status code. + +```solidity +// callee +revert(0, 0) + +// caller +let success = call(...) +if iszero(success) { + // option 1: rethrow on the contract level + returndatacopy(...) + revert(...) + + // option 2: rethrow on the function level + verbatim("throw") // only available in the Yul mode and upcoming zkEVM solc +} +``` + +zkEVM behaves exactly the same. The VM automatically unwinds the call stack up to the uppermost function frame +of the contract, leaving no possibility to catch and handle it on the way. + +These types of exceptions are more efficient, as you can revert at any point of the execution without propagating +the control flow all the way up to the uppermost function frame. + +### Implementation + +In EraVM, contracts call each other using [`far_call` instruction](%%zk_git_repo_eravm-spec%%/spec.html#FarCalls). +It [accepts the address of the exception handler](%%zk_git_repo_eravm-spec%%/spec.html#OpFarCall) as one of its arguments. + +## Function Level + +This type of exceptions is more common to general-purpose languages like C++. 
That is why it was easy to support +within the LLVM framework, even though it is not supported by the smart contract languages we work with. +That is also one of the reasons why the two EH mechanisms are handled separately and barely interact in the high-level code. + +In general-purpose languages a set of EH tools is usually available, e.g. `try` , `throw`, and `catch` keywords that +define which piece of code may throw and how the exception must be handled. However, these tools are not available +in Solidity and its EVM Yul dialect, so some extensions have been added in the zkEVM Yul dialect compiled by zksolc, +but there are limitations, some of which are dictated by the nature of smart contracts: + +1. Every function beginning with `ZKSYNC_NEAR_CALL` is implicitly wrapped with `try`. If there is an exception handler defined, the following will happen: + - A panic will be caught by the caller of such function. + - The control will be transferred to EH function. + There can be only one EH function and it must be named `ZKSYNC_CATCH_NEAR_CALL`. + It is not very efficient, because all functions must have an LLVM IR `catch` block that will catch and propagate the exception and call the EH function. + - When the EH function has finished executing, the caller of `ZKSYNC_NEAR_CALL` receives the control back. +2. Every operation is `throw`. +Since any instruction can panic due to out-of-gas, all of them can throw. +It is another thing reducing the potential for optimizations. +3. The `catch` block is represented by the `ZKSYNC_CATCH_NEAR_CALL` function in Yul. +A panic in `ZKSYNC_NEAR_CALL` will make **their caller** catch the exception and call the EH function. +After the EH function is executed, the control is returned to the caller of `ZKSYNC_NEAR_CALL`. + +```solidity +// Follow the numbers for the order of execution. 
The call order is: +// caller -> ZKSYNC_NEAR_CALL_callee -> callee_even_deeper -> ZKSYNC_CATCH_NEAR_CALL -> caller + +function ZKSYNC_NEAR_CALL_callee() -> value { // 03 + value := callee_even_deeper() // 04 +} + +function callee_even_deeper() -> value { // 05 + verbatim("throw") // 06 +} + +// In every function an implicit 'catch' block in LLVM IR is created. +// This block will do the following: +// 1. Keep the return value ('zero') zero-initialized if one is expected +// 2. Call the EH function ('ZKSYNC_CATCH_NEAR_CALL') +// 3. Return the control flow to the next instruction ('value := 42') +function caller() -> value { // 01 + let zero := ZKSYNC_NEAR_CALL_callee() // 02 + value := 42 // 09 +} + +// This handler could also be doing a revert. +// Reverts in EH functions work in the same way as without EH at all. +// They immediately terminate the execution and the control is given to the contract callee. +function ZKSYNC_CATCH_NEAR_CALL() { // 07 + log0(...) // 08 +} +``` + +Having all the overhead above, the `catch` blocks are only generated if there is the EH function `ZKSYNC_CATCH_NEAR_CALL` +defined in the contract. Otherwise there is no need to catch panics and they will be propagated to the callee contract +automatically by the VM execution environment. + +### Implementation + +In EraVM, there are two ways of implementing contract-local function calls: + +1. Saving the return address and using a [`jump`](%%zk_git_repo_eravm-spec%%/spec.html#JumpDefinition) instruction to call; +using [`jump`](%%zk_git_repo_eravm-spec%%/spec.html#JumpDefinition) instruction with saved return address to return. +2. Using + [`call`](%%zk_git_repo_eravm-spec%%/spec.html#NearCallDefinition) + instruction to call; using one of `ret` instructions with modifiers + [`ok`](%%zk_git_repo_eravm-spec%%/spec.html#NearRetDefinition), + [`revert`](%%zk_git_repo_eravm-spec%%/spec.html#NearRevertDefinition), or + [`panic`](%%zk_git_repo_eravm-spec%%/spec.html#step_oppanic) to return. 
+
+Using `jump` is more lightweight and cheaper, but using `call`/`ret` is more feature-rich:
+
+1. In case of panic or revert, the storage effects and queues of this function are rolled back.
+2. It is possible to pass a portion of available gas; the unused gas will be returned to the caller, unless the function panicked.
+3. It is possible to set up a custom exception handler.
+
+Prefixing a Yul function name with `ZKSYNC_NEAR_CALL_` allows using this
+additional, platform-specific functionality, implemented by the `call`
+instruction. For other functions, the choice between `call`/`ret` or `jump` is
+up to the compiler.
diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/50.evmla-translator.md b/content/10.zk-stack/10.components/70.compiler/20.specification/50.evmla-translator.md
new file mode 100644
index 00000000..3a147dc5
--- /dev/null
+++ b/content/10.zk-stack/10.components/70.compiler/20.specification/50.evmla-translator.md
@@ -0,0 +1,759 @@
+---
+title: EVM Legacy Assembly Translator
+description:
+---
+
+There are two Solidity IRs used in our pipeline: Yul and EVM legacy assembly. The former is used for older versions of
+Solidity, more precisely <=0.7.
+
+EVM legacy assembly is very challenging to translate to LLVM IR, since it obfuscates the control flow of the program and
+uses a lot of dynamic jumps. Most of the jumps can be translated to static ones by using a static analysis of EVM assembly,
+but some of the jumps are impossible to resolve statically. For example, internal function pointers can be written
+to memory or storage, and then loaded and called. Recursion is another case we have skipped for now, as there is another
+stack frame allocated on every iteration, preventing the static analyzer from resolving the jumps.
+
+Both issues are being worked on in our fork of the Solidity compiler, where we are changing the codegen to remove the
+dynamic jumps and add the necessary metadata. 
+ +Below you can see a minimal example of a Solidity contract and its EVM legacy assembly translated to LLVM IR which is +eventually compiled to EraVM assembly. + +## Source Code + +```solidity +contract Example { + function main() public pure returns (uint256 result) { + result = 42; + } +} +``` + +## EVM Legacy Assembly + +Produced by the upstream Solidity compiler v0.7.6. + +```txt +| Line | Instruction | Value/Tag | +| ---- | ------------ | --------- | +| 000 | PUSH | 80 | +| 001 | PUSH | 40 | +| 002 | MSTORE | | +| 003 | CALLVALUE | | +| 004 | DUP1 | | +| 005 | ISZERO | | +| 006 | PUSH | [tag] 1 | +| 007 | JUMPI | | +| 008 | PUSH | 0 | +| 009 | DUP1 | | +| 010 | REVERT | | +| 011 | Tag 1 | | +| 012 | JUMPDEST | | +| 013 | POP | | +| 014 | PUSH | 4 | +| 015 | CALLDATASIZE | | +| 016 | LT | | +| 017 | PUSH | [tag] 2 | +| 018 | JUMPI | | +| 019 | PUSH | 0 | +| 020 | CALLDATALOAD | | +| 021 | PUSH | E0 | +| 022 | SHR | | +| 023 | DUP1 | | +| 024 | PUSH | 5A8AC02D | +| 025 | EQ | | +| 026 | PUSH | [tag] 3 | +| 027 | JUMPI | | +| 028 | Tag 2 | | +| 029 | JUMPDEST | | +| 030 | PUSH | 0 | +| 031 | DUP1 | | +| 032 | REVERT | | +| 033 | Tag 3 | | +| 034 | JUMPDEST | | +| 035 | PUSH | [tag] 4 | +| 036 | PUSH | [tag] 5 | +| 037 | JUMP | [in] | +| 038 | Tag 4 | | +| 039 | JUMPDEST | | +| 040 | PUSH | 40 | +| 041 | DUP1 | | +| 042 | MLOAD | | +| 043 | SWAP2 | | +| 044 | DUP3 | | +| 045 | MSTORE | | +| 046 | MLOAD | | +| 047 | SWAP1 | | +| 048 | DUP2 | | +| 049 | SWAP1 | | +| 050 | SUB | | +| 051 | PUSH | 20 | +| 052 | ADD | | +| 053 | SWAP1 | | +| 054 | RETURN | | +| 055 | Tag 5 | | +| 056 | JUMPDEST | | +| 057 | PUSH | 2A | +| 058 | SWAP1 | | +| 059 | JUMP | [out] | +``` + +## EthIR + +EthIR (Ethereal IR) is a special IR used by our translator to represent EVM legacy assembly and prepare it for the +translation to LLVM IR. The IR solves several purposes: + +1. Tracking the stack state to extract jump destinations. +2. 
Duplicating blocks that are reachable with different stack states. +3. Restoring the complete control-flow graph of the contract using the abovementioned data. +4. Resolving dependencies and static data chunks. + +Data format: + +1. `V_<name>` - value returned by an instruction `<name>`. +2. `T_<tag>` - tag of a block `<tag>`. +3. `40` - hexadecimal constant. +4. `tests/solidity/simple/default.sol:Test` - contract definition. + +Stack format: `[ V_CALLVALUE ]` (current values) - `[ V_CALLVALUE ]` (popped values) + `[ V_ISZERO ]` (pushed values) + +```text +// The default entry function of the contract. +function main { +// The maximum stack size in the function. + stack_usage: 6 +block_dt_0/0: // Deploy Code Tag 0, Instance 0. +// PUSHed 0x80 onto the stack. + PUSH 80 [ ] + [ 80 ] +// PUSHed 0x40 onto the stack. + PUSH 40 [ 80 ] + [ 40 ] +// POPped 0x40 at 0x80 from the stack to store 0x80 at 0x40. + MSTORE [ ] - [ 80 | 40 ] +// PUSHed CALLVALUE onto the stack. + CALLVALUE [ ] + [ V_CALLVALUE ] + DUP1 [ V_CALLVALUE ] + [ V_CALLVALUE ] + ISZERO [ V_CALLVALUE ] - [ V_CALLVALUE ] + [ V_ISZERO ] + PUSH [tag] 1 [ V_CALLVALUE | V_ISZERO ] + [ T_1 ] +// JUMPI schedules rt_0/0 for analysis with the current stack state. + JUMPI [ V_CALLVALUE ] - [ V_ISZERO | T_1 ] + PUSH 0 [ V_CALLVALUE ] + [ 0 ] + DUP1 [ V_CALLVALUE | 0 ] + [ 0 ] + REVERT [ V_CALLVALUE ] - [ 0 | 0 ] +block_dt_1/0: (predecessors: dt_0/0) // Deploy Code Tag 1, Instance 0; the only predecessor of this block is dt_0/0. +// JUMPDESTs are ignored as we are only interested in the stack state and tag destinations. 
+ JUMPDEST [ V_CALLVALUE ] + POP [ ] - [ V_CALLVALUE ] + PUSH #[$] tests/solidity/simple/default.sol:Test [ ] + [ tests/solidity/simple/default.sol:Test ] + DUP1 [ tests/solidity/simple/default.sol:Test ] + [ tests/solidity/simple/default.sol:Test ] + PUSH [$] tests/solidity/simple/default.sol:Test [ tests/solidity/simple/default.sol:Test | tests/solidity/simple/default.sol:Test ] + [ tests/solidity/simple/default.sol:Test ] + PUSH 0 [ tests/solidity/simple/default.sol:Test | tests/solidity/simple/default.sol:Test | tests/solidity/simple/default.sol:Test ] + [ 0 ] + CODECOPY [ tests/solidity/simple/default.sol:Test ] - [ tests/solidity/simple/default.sol:Test | tests/solidity/simple/default.sol:Test | 0 ] + PUSH 0 [ tests/solidity/simple/default.sol:Test ] + [ 0 ] + RETURN [ ] - [ tests/solidity/simple/default.sol:Test | 0 ] +// The runtime code is analyzed in the same control-flow graph as the deploy code, as it is possible to call its functions from the constructor. +block_rt_0/0: // Deploy Code Tag 0, Instance 0. + PUSH 80 [ ] + [ 80 ] + PUSH 40 [ 80 ] + [ 40 ] + MSTORE [ ] - [ 80 | 40 ] + CALLVALUE [ ] + [ V_CALLVALUE ] + DUP1 [ V_CALLVALUE ] + [ V_CALLVALUE ] + ISZERO [ V_CALLVALUE ] - [ V_CALLVALUE ] + [ V_ISZERO ] + PUSH [tag] 1 [ V_CALLVALUE | V_ISZERO ] + [ T_1 ] + JUMPI [ V_CALLVALUE ] - [ V_ISZERO | T_1 ] + PUSH 0 [ V_CALLVALUE ] + [ 0 ] + DUP1 [ V_CALLVALUE | 0 ] + [ 0 ] + REVERT [ V_CALLVALUE ] - [ 0 | 0 ] +block_rt_1/0: (predecessors: rt_0/0) // Runtime Code Tag 1, Instance 0; the only predecessor of this block is rt_0/0. 
+ JUMPDEST [ V_CALLVALUE ] + POP [ ] - [ V_CALLVALUE ] + PUSH 4 [ ] + [ 4 ] + CALLDATASIZE [ 4 ] + [ V_CALLDATASIZE ] + LT [ ] - [ 4 | V_CALLDATASIZE ] + [ V_LT ] + PUSH [tag] 2 [ V_LT ] + [ T_2 ] + JUMPI [ ] - [ V_LT | T_2 ] + PUSH 0 [ ] + [ 0 ] + CALLDATALOAD [ ] - [ 0 ] + [ V_CALLDATALOAD ] + PUSH E0 [ V_CALLDATALOAD ] + [ E0 ] + SHR [ ] - [ V_CALLDATALOAD | E0 ] + [ V_SHR ] + DUP1 [ V_SHR ] + [ V_SHR ] + PUSH 5A8AC02D [ V_SHR | V_SHR ] + [ 5A8AC02D ] + EQ [ V_SHR ] - [ V_SHR | 5A8AC02D ] + [ V_EQ ] + PUSH [tag] 3 [ V_SHR | V_EQ ] + [ T_3 ] + JUMPI [ V_SHR ] - [ V_EQ | T_3 ] + Tag 2 [ V_SHR ] +// This instance is called with a different stack state using the JUMPI above. +block_rt_2/0: (predecessors: rt_1/0) // Runtime Code Tag 2, Instance 0. + JUMPDEST [ ] + PUSH 0 [ ] + [ 0 ] + DUP1 [ 0 ] + [ 0 ] + REVERT [ ] - [ 0 | 0 ] +// This instance is also called from rt_1/0, but using a fallthrough 'Tag 2'. +// Given different stack states, we create a new instance of the block operating on different data +// and potentially different tag destinations, although usually such blocks are merged back by LLVM. +block_rt_2/1: (predecessors: rt_1/0) // Runtime Code Tag 2, Instance 1. + JUMPDEST [ V_SHR ] + PUSH 0 [ V_SHR ] + [ 0 ] + DUP1 [ V_SHR | 0 ] + [ 0 ] + REVERT [ V_SHR ] - [ 0 | 0 ] +block_rt_3/0: (predecessors: rt_1/0) // Runtime Code Tag 3, Instance 0. + JUMPDEST [ V_SHR ] + PUSH [tag] 4 [ V_SHR ] + [ T_4 ] + PUSH [tag] 5 [ V_SHR | T_4 ] + [ T_5 ] + JUMP [in] [ V_SHR | T_4 ] - [ T_5 ] +block_rt_4/0: (predecessors: rt_5/0) // Runtime Code Tag 4, Instance 0. 
+ JUMPDEST [ V_SHR | 2A ] + PUSH 40 [ V_SHR | 2A ] + [ 40 ] + DUP1 [ V_SHR | 2A | 40 ] + [ 40 ] + MLOAD [ V_SHR | 2A | 40 ] - [ 40 ] + [ V_MLOAD ] + SWAP2 [ V_SHR | V_MLOAD | 40 | 2A ] + DUP3 [ V_SHR | V_MLOAD | 40 | 2A ] + [ V_MLOAD ] + MSTORE [ V_SHR | V_MLOAD | 40 ] - [ 2A | V_MLOAD ] + MLOAD [ V_SHR | V_MLOAD ] - [ 40 ] + [ V_MLOAD ] + SWAP1 [ V_SHR | V_MLOAD | V_MLOAD ] + DUP2 [ V_SHR | V_MLOAD | V_MLOAD ] + [ V_MLOAD ] + SWAP1 [ V_SHR | V_MLOAD | V_MLOAD | V_MLOAD ] + SUB [ V_SHR | V_MLOAD ] - [ V_MLOAD | V_MLOAD ] + [ V_SUB ] + PUSH 20 [ V_SHR | V_MLOAD | V_SUB ] + [ 20 ] + ADD [ V_SHR | V_MLOAD ] - [ V_SUB | 20 ] + [ V_ADD ] + SWAP1 [ V_SHR | V_ADD | V_MLOAD ] + RETURN [ V_SHR ] - [ V_ADD | V_MLOAD ] +block_rt_5/0: (predecessors: rt_3/0) // Runtime Code Tag 5, Instance 0. + JUMPDEST [ V_SHR | T_4 ] + PUSH 2A [ V_SHR | T_4 ] + [ 2A ] + SWAP1 [ V_SHR | 2A | T_4 ] +// JUMP [out] is usually a return statement + JUMP [out] [ V_SHR | 2A ] - [ T_4 ] +``` + +### Unoptimized LLVM IR + +In LLVM IR, the necessary stack space is allocated at the beginning of the function. + +Every stack operation interacts with a statically known stack pointer with an offset from EthIR. 
+ +```txt +; Function Attrs: nofree null_pointer_is_valid +define i256 @__entry(ptr addrspace(3) %0, i256 %1, i256 %2, i256 %3, i256 %4, i256 %5, i256 %6, i256 %7, i256 %8, i256 %9, i256 %10, i256 %11) #8 personality ptr @__personality { +entry: + store i256 0, ptr @memory_pointer, align 32 + store i256 0, ptr @calldatasize, align 32 + store i256 0, ptr @returndatasize, align 32 + store i256 0, ptr @call_flags, align 32 + store [10 x i256] zeroinitializer, ptr @extra_abi_data, align 32 + store ptr addrspace(3) %0, ptr @ptr_calldata, align 32 + %abi_pointer_value = ptrtoint ptr addrspace(3) %0 to i256 + %abi_pointer_value_shifted = lshr i256 %abi_pointer_value, 96 + %abi_length_value = and i256 %abi_pointer_value_shifted, 4294967295 + store i256 %abi_length_value, ptr @calldatasize, align 32 + %calldatasize = load i256, ptr @calldatasize, align 32 + %return_data_abi_initializer = getelementptr i8, ptr addrspace(3) %0, i256 %calldatasize + store ptr addrspace(3) %return_data_abi_initializer, ptr @ptr_return_data, align 32 + store ptr addrspace(3) %return_data_abi_initializer, ptr @ptr_active, align 32 + store i256 %1, ptr @call_flags, align 32 + store i256 %2, ptr @extra_abi_data, align 32 + store i256 %3, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 1), align 32 + store i256 %4, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 2), align 32 + store i256 %5, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 3), align 32 + store i256 %6, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 4), align 32 + store i256 %7, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 5), align 32 + store i256 %8, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 6), align 32 + store i256 %9, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 7), align 32 + store i256 %10, ptr getelementptr inbounds ([10 x i256], ptr 
@extra_abi_data, i256 0, i32 8), align 32 + store i256 %11, ptr getelementptr inbounds ([10 x i256], ptr @extra_abi_data, i256 0, i32 9), align 32 + %is_deploy_code_call_flag_truncated = and i256 %1, 1 + %is_deploy_code_call_flag = icmp eq i256 %is_deploy_code_call_flag_truncated, 1 + br i1 %is_deploy_code_call_flag, label %deploy_code_call_block, label %runtime_code_call_block + +return: ; preds = %runtime_code_call_block, %deploy_code_call_block + ret i256 0 + +deploy_code_call_block: ; preds = %entry + call void @__deploy() + br label %return + +runtime_code_call_block: ; preds = %entry + call void @__runtime() + br label %return +} + +; Function Attrs: nofree null_pointer_is_valid +define private void @__deploy() #8 personality ptr @__personality { +entry: + call void @main(i1 true) + br label %return + +return: ; preds = %entry + ret void +} + +; Function Attrs: nofree null_pointer_is_valid +define private void @__runtime() #8 personality ptr @__personality { +entry: + call void @main(i1 false) + br label %return + +return: ; preds = %entry + ret void +} + +; Function Attrs: nofree null_pointer_is_valid +define private void @main(i1 %0) #8 personality ptr @__personality { ; 6 cells are allocated at the beginning of the function. +entry: + %stack_var_000 = alloca i256, align 32 + store i256 0, ptr %stack_var_000, align 32 + %stack_var_001 = alloca i256, align 32 + store i256 0, ptr %stack_var_001, align 32 + %stack_var_002 = alloca i256, align 32 + store i256 0, ptr %stack_var_002, align 32 + %stack_var_003 = alloca i256, align 32 + store i256 0, ptr %stack_var_003, align 32 + %stack_var_004 = alloca i256, align 32 + store i256 0, ptr %stack_var_004, align 32 + %stack_var_005 = alloca i256, align 32 + store i256 0, ptr %stack_var_005, align 32 + br i1 %0, label %"block_dt_0/0", label %"block_rt_0/0" + +return: ; No predecessors! 
+ ret void + +"block_dt_0/0": ; preds = %entry + store i256 128, ptr %stack_var_000, align 32 + store i256 64, ptr %stack_var_001, align 32 + %argument_0 = load i256, ptr %stack_var_001, align 32 + %argument_1 = load i256, ptr %stack_var_000, align 32 + %memory_store_pointer = inttoptr i256 %argument_0 to ptr addrspace(1) + store i256 %argument_1, ptr addrspace(1) %memory_store_pointer, align 1 + %get_u128_value = call i256 @llvm.syncvm.getu128() + store i256 %get_u128_value, ptr %stack_var_000, align 32 + %dup1 = load i256, ptr %stack_var_000, align 32 + store i256 %dup1, ptr %stack_var_001, align 32 + %argument_01 = load i256, ptr %stack_var_001, align 32 + %comparison_result = icmp eq i256 %argument_01, 0 + %comparison_result_extended = zext i1 %comparison_result to i256 + store i256 %comparison_result_extended, ptr %stack_var_001, align 32 + store i256 1, ptr %stack_var_002, align 32 + %conditional_dt_1_condition = load i256, ptr %stack_var_001, align 32 + %conditional_dt_1_condition_compared = icmp ne i256 %conditional_dt_1_condition, 0 + br i1 %conditional_dt_1_condition_compared, label %"block_dt_1/0", label %conditional_dt_1_join_block + +"block_dt_1/0": ; preds = %"block_dt_0/0" + store i256 0, ptr %stack_var_000, align 32 + %dup15 = load i256, ptr %stack_var_000, align 32 + store i256 %dup15, ptr %stack_var_001, align 32 + store i256 0, ptr %stack_var_002, align 32 + store i256 0, ptr %stack_var_003, align 32 + %argument_06 = load i256, ptr %stack_var_003, align 32 + %argument_17 = load i256, ptr %stack_var_002, align 32 + %argument_2 = load i256, ptr %stack_var_001, align 32 + %calldata_copy_destination_pointer = inttoptr i256 %argument_06 to ptr addrspace(1) + %calldata_pointer = load ptr addrspace(3), ptr @ptr_calldata, align 32 + %calldata_source_pointer = getelementptr i8, ptr addrspace(3) %calldata_pointer, i256 %argument_17 + call void @llvm.memcpy.p1.p3.i256(ptr addrspace(1) align 1 %calldata_copy_destination_pointer, ptr addrspace(3) align 1 
%calldata_source_pointer, i256 %argument_2, i1 false) + store i256 0, ptr %stack_var_001, align 32 + %argument_08 = load i256, ptr %stack_var_001, align 32 + %argument_19 = load i256, ptr %stack_var_000, align 32 + store i256 32, ptr addrspace(2) inttoptr (i256 256 to ptr addrspace(2)), align 1 + store i256 0, ptr addrspace(2) inttoptr (i256 288 to ptr addrspace(2)), align 1 + call void @__return(i256 256, i256 64, i256 2) + unreachable + +"block_rt_0/0": ; preds = %entry + store i256 128, ptr %stack_var_000, align 32 + store i256 64, ptr %stack_var_001, align 32 + %argument_010 = load i256, ptr %stack_var_001, align 32 + %argument_111 = load i256, ptr %stack_var_000, align 32 + %memory_store_pointer12 = inttoptr i256 %argument_010 to ptr addrspace(1) + store i256 %argument_111, ptr addrspace(1) %memory_store_pointer12, align 1 + %get_u128_value13 = call i256 @llvm.syncvm.getu128() + store i256 %get_u128_value13, ptr %stack_var_000, align 32 + %dup114 = load i256, ptr %stack_var_000, align 32 + store i256 %dup114, ptr %stack_var_001, align 32 + %argument_015 = load i256, ptr %stack_var_001, align 32 + %comparison_result16 = icmp eq i256 %argument_015, 0 + %comparison_result_extended17 = zext i1 %comparison_result16 to i256 + store i256 %comparison_result_extended17, ptr %stack_var_001, align 32 + store i256 1, ptr %stack_var_002, align 32 + %conditional_rt_1_condition = load i256, ptr %stack_var_001, align 32 + %conditional_rt_1_condition_compared = icmp ne i256 %conditional_rt_1_condition, 0 + br i1 %conditional_rt_1_condition_compared, label %"block_rt_1/0", label %conditional_rt_1_join_block + +"block_rt_1/0": ; preds = %"block_rt_0/0" + store i256 4, ptr %stack_var_000, align 32 + %calldatasize = load i256, ptr @calldatasize, align 32 + store i256 %calldatasize, ptr %stack_var_001, align 32 + %argument_021 = load i256, ptr %stack_var_001, align 32 + %argument_122 = load i256, ptr %stack_var_000, align 32 + %comparison_result23 = icmp ult i256 %argument_021, 
%argument_122 + %comparison_result_extended24 = zext i1 %comparison_result23 to i256 + store i256 %comparison_result_extended24, ptr %stack_var_000, align 32 + store i256 2, ptr %stack_var_001, align 32 + %conditional_rt_2_condition = load i256, ptr %stack_var_000, align 32 + %conditional_rt_2_condition_compared = icmp ne i256 %conditional_rt_2_condition, 0 + br i1 %conditional_rt_2_condition_compared, label %"block_rt_2/0", label %conditional_rt_2_join_block + +"block_rt_2/0": ; preds = %"block_rt_1/0" + store i256 0, ptr %stack_var_000, align 32 + %dup134 = load i256, ptr %stack_var_000, align 32 + store i256 %dup134, ptr %stack_var_001, align 32 + %argument_035 = load i256, ptr %stack_var_001, align 32 + %argument_136 = load i256, ptr %stack_var_000, align 32 + call void @__revert(i256 %argument_035, i256 %argument_136, i256 0) + unreachable + +"block_rt_2/1": ; preds = %conditional_rt_3_join_block + store i256 0, ptr %stack_var_001, align 32 + %dup137 = load i256, ptr %stack_var_001, align 32 + store i256 %dup137, ptr %stack_var_002, align 32 + %argument_038 = load i256, ptr %stack_var_002, align 32 + %argument_139 = load i256, ptr %stack_var_001, align 32 + call void @__revert(i256 %argument_038, i256 %argument_139, i256 0) + unreachable + +"block_rt_3/0": ; preds = %conditional_rt_2_join_block + store i256 4, ptr %stack_var_001, align 32 + store i256 5, ptr %stack_var_002, align 32 + br label %"block_rt_5/0" + +"block_rt_4/0": ; preds = %"block_rt_5/0" + store i256 64, ptr %stack_var_002, align 32 + %dup140 = load i256, ptr %stack_var_002, align 32 + store i256 %dup140, ptr %stack_var_003, align 32 + %argument_041 = load i256, ptr %stack_var_003, align 32 + %memory_load_pointer = inttoptr i256 %argument_041 to ptr addrspace(1) + %memory_load_result = load i256, ptr addrspace(1) %memory_load_pointer, align 1 + store i256 %memory_load_result, ptr %stack_var_003, align 32 + %swap2_top_value = load i256, ptr %stack_var_003, align 32 + %swap2_swap_value = load 
i256, ptr %stack_var_001, align 32 + store i256 %swap2_swap_value, ptr %stack_var_003, align 32 + store i256 %swap2_top_value, ptr %stack_var_001, align 32 + %dup3 = load i256, ptr %stack_var_001, align 32 + store i256 %dup3, ptr %stack_var_004, align 32 + %argument_042 = load i256, ptr %stack_var_004, align 32 + %argument_143 = load i256, ptr %stack_var_003, align 32 + %memory_store_pointer44 = inttoptr i256 %argument_042 to ptr addrspace(1) + store i256 %argument_143, ptr addrspace(1) %memory_store_pointer44, align 1 + %argument_045 = load i256, ptr %stack_var_002, align 32 + %memory_load_pointer46 = inttoptr i256 %argument_045 to ptr addrspace(1) + %memory_load_result47 = load i256, ptr addrspace(1) %memory_load_pointer46, align 1 + store i256 %memory_load_result47, ptr %stack_var_002, align 32 + %swap1_top_value = load i256, ptr %stack_var_002, align 32 + %swap1_swap_value = load i256, ptr %stack_var_001, align 32 + store i256 %swap1_swap_value, ptr %stack_var_002, align 32 + store i256 %swap1_top_value, ptr %stack_var_001, align 32 + %dup2 = load i256, ptr %stack_var_001, align 32 + store i256 %dup2, ptr %stack_var_003, align 32 + %swap1_top_value48 = load i256, ptr %stack_var_003, align 32 + %swap1_swap_value49 = load i256, ptr %stack_var_002, align 32 + store i256 %swap1_swap_value49, ptr %stack_var_003, align 32 + store i256 %swap1_top_value48, ptr %stack_var_002, align 32 + %argument_050 = load i256, ptr %stack_var_003, align 32 + %argument_151 = load i256, ptr %stack_var_002, align 32 + %subtraction_result = sub i256 %argument_050, %argument_151 + store i256 %subtraction_result, ptr %stack_var_002, align 32 + store i256 32, ptr %stack_var_003, align 32 + %argument_052 = load i256, ptr %stack_var_003, align 32 + %argument_153 = load i256, ptr %stack_var_002, align 32 + %addition_result = add i256 %argument_052, %argument_153 + store i256 %addition_result, ptr %stack_var_002, align 32 + %swap1_top_value54 = load i256, ptr %stack_var_002, align 32 + 
%swap1_swap_value55 = load i256, ptr %stack_var_001, align 32 + store i256 %swap1_swap_value55, ptr %stack_var_002, align 32 + store i256 %swap1_top_value54, ptr %stack_var_001, align 32 + %argument_056 = load i256, ptr %stack_var_002, align 32 + %argument_157 = load i256, ptr %stack_var_001, align 32 + call void @__return(i256 %argument_056, i256 %argument_157, i256 0) + unreachable + +"block_rt_5/0": ; preds = %"block_rt_3/0" + store i256 42, ptr %stack_var_002, align 32 + %swap1_top_value58 = load i256, ptr %stack_var_002, align 32 + %swap1_swap_value59 = load i256, ptr %stack_var_001, align 32 + store i256 %swap1_swap_value59, ptr %stack_var_002, align 32 + store i256 %swap1_top_value58, ptr %stack_var_001, align 32 + br label %"block_rt_4/0" + +conditional_dt_1_join_block: ; preds = %"block_dt_0/0" + store i256 0, ptr %stack_var_001, align 32 + %dup12 = load i256, ptr %stack_var_001, align 32 + store i256 %dup12, ptr %stack_var_002, align 32 + %argument_03 = load i256, ptr %stack_var_002, align 32 + %argument_14 = load i256, ptr %stack_var_001, align 32 + call void @__revert(i256 %argument_03, i256 %argument_14, i256 0) + unreachable + +conditional_rt_1_join_block: ; preds = %"block_rt_0/0" + store i256 0, ptr %stack_var_001, align 32 + %dup118 = load i256, ptr %stack_var_001, align 32 + store i256 %dup118, ptr %stack_var_002, align 32 + %argument_019 = load i256, ptr %stack_var_002, align 32 + %argument_120 = load i256, ptr %stack_var_001, align 32 + call void @__revert(i256 %argument_019, i256 %argument_120, i256 0) + unreachable + +conditional_rt_2_join_block: ; preds = %"block_rt_1/0" + store i256 0, ptr %stack_var_000, align 32 + %argument_025 = load i256, ptr %stack_var_000, align 32 + %calldata_pointer26 = load ptr addrspace(3), ptr @ptr_calldata, align 32 + %calldata_pointer_with_offset = getelementptr i8, ptr addrspace(3) %calldata_pointer26, i256 %argument_025 + %calldata_value = load i256, ptr addrspace(3) %calldata_pointer_with_offset, align 32 + 
store i256 %calldata_value, ptr %stack_var_000, align 32 + store i256 224, ptr %stack_var_001, align 32 + %argument_027 = load i256, ptr %stack_var_001, align 32 + %argument_128 = load i256, ptr %stack_var_000, align 32 + %shr_call = call i256 @__shr(i256 %argument_027, i256 %argument_128) + store i256 %shr_call, ptr %stack_var_000, align 32 + %dup129 = load i256, ptr %stack_var_000, align 32 + store i256 %dup129, ptr %stack_var_001, align 32 + store i256 1519042605, ptr %stack_var_002, align 32 + %argument_030 = load i256, ptr %stack_var_002, align 32 + %argument_131 = load i256, ptr %stack_var_001, align 32 + %comparison_result32 = icmp eq i256 %argument_030, %argument_131 + %comparison_result_extended33 = zext i1 %comparison_result32 to i256 + store i256 %comparison_result_extended33, ptr %stack_var_001, align 32 + store i256 3, ptr %stack_var_002, align 32 + %conditional_rt_3_condition = load i256, ptr %stack_var_001, align 32 + %conditional_rt_3_condition_compared = icmp ne i256 %conditional_rt_3_condition, 0 + br i1 %conditional_rt_3_condition_compared, label %"block_rt_3/0", label %conditional_rt_3_join_block + +conditional_rt_3_join_block: ; preds = %conditional_rt_2_join_block + br label %"block_rt_2/1" +} + +attributes #0 = { nounwind } +attributes #1 = { nounwind readnone } +attributes #2 = { nounwind readonly } +attributes #3 = { writeonly } +attributes #4 = { argmemonly nocallback nofree nounwind willreturn } +attributes #5 = { noprofile } +attributes #6 = { mustprogress nofree nounwind null_pointer_is_valid readnone willreturn } +attributes #7 = { mustprogress nofree nounwind null_pointer_is_valid willreturn } +attributes #8 = { nofree null_pointer_is_valid } +``` + +### Optimized LLVM IR + +The redundancy is optimized by LLVM, resulting in the optimized LLVM IR below. 
+ +```txt +; Function Attrs: nofree noreturn null_pointer_is_valid +define i256 @__entry(ptr addrspace(3) %0, i256 %1, i256 %2, i256 %3, i256 %4, i256 %5, i256 %6, i256 %7, i256 %8, i256 %9, i256 %10, i256 %11) local_unnamed_addr #1 personality ptr @__personality { +entry: + store ptr addrspace(3) %0, ptr @ptr_calldata, align 32 + %abi_pointer_value = ptrtoint ptr addrspace(3) %0 to i256 + %abi_pointer_value_shifted = lshr i256 %abi_pointer_value, 96 + %abi_length_value = and i256 %abi_pointer_value_shifted, 4294967295 + store i256 %abi_length_value, ptr @calldatasize, align 32 + %is_deploy_code_call_flag_truncated = and i256 %1, 1 + %is_deploy_code_call_flag.not = icmp eq i256 %is_deploy_code_call_flag_truncated, 0 + store i256 128, ptr addrspace(1) inttoptr (i256 64 to ptr addrspace(1)), align 64 + %get_u128_value.i.i1 = tail call i256 @llvm.syncvm.getu128() + br i1 %is_deploy_code_call_flag.not, label %runtime_code_call_block, label %deploy_code_call_block + +deploy_code_call_block: ; preds = %entry + %comparison_result.i.i = icmp eq i256 %get_u128_value.i.i1, 0 + br i1 %comparison_result.i.i, label %"block_dt_1/0.i.i", label %"block_rt_2/0.i.i" + +"block_dt_1/0.i.i": ; preds = %deploy_code_call_block + store i256 32, ptr addrspace(2) inttoptr (i256 256 to ptr addrspace(2)), align 256 + store i256 0, ptr addrspace(2) inttoptr (i256 288 to ptr addrspace(2)), align 32 + tail call void @llvm.syncvm.return(i256 53919893334301279589334030174039261352344891250716429051063678533632) + unreachable + +"block_rt_2/0.i.i": ; preds = %runtime_code_call_block, %conditional_rt_2_join_block.i.i, %deploy_code_call_block + tail call void @llvm.syncvm.revert(i256 0) + unreachable + +runtime_code_call_block: ; preds = %entry + %comparison_result.i.i2 = icmp ne i256 %get_u128_value.i.i1, 0 + %calldatasize.i.i = load i256, ptr @calldatasize, align 32 + %comparison_result23.i.i = icmp ult i256 %calldatasize.i.i, 4 + %or.cond.i.i = select i1 %comparison_result.i.i2, i1 true, i1 
%comparison_result23.i.i + br i1 %or.cond.i.i, label %"block_rt_2/0.i.i", label %conditional_rt_2_join_block.i.i + +"block_rt_3/0.i.i": ; preds = %conditional_rt_2_join_block.i.i + %memory_load_result.i.i = load i256, ptr addrspace(1) inttoptr (i256 64 to ptr addrspace(1)), align 64 + %memory_store_pointer44.i.i = inttoptr i256 %memory_load_result.i.i to ptr addrspace(1) + store i256 42, ptr addrspace(1) %memory_store_pointer44.i.i, align 1 + %memory_load_result47.i.i = load i256, ptr addrspace(1) inttoptr (i256 64 to ptr addrspace(1)), align 64 + %subtraction_result.i.i = add i256 %memory_load_result.i.i, 32 + %addition_result.i.i = sub i256 %subtraction_result.i.i, %memory_load_result47.i.i + %12 = tail call i256 @llvm.umin.i256(i256 %memory_load_result47.i.i, i256 4294967295) + %13 = tail call i256 @llvm.umin.i256(i256 %addition_result.i.i, i256 4294967295) + %offset_shifted.i.i.i.i = shl nuw nsw i256 %12, 64 + %length_shifted.i.i.i.i = shl nuw nsw i256 %13, 96 + %tmp.i.i.i.i = or i256 %length_shifted.i.i.i.i, %offset_shifted.i.i.i.i + tail call void @llvm.syncvm.return(i256 %tmp.i.i.i.i) + unreachable + +conditional_rt_2_join_block.i.i: ; preds = %runtime_code_call_block + %calldata_pointer26.i.i = load ptr addrspace(3), ptr @ptr_calldata, align 32 + %calldata_value.i.i = load i256, ptr addrspace(3) %calldata_pointer26.i.i, align 32 + %shift_res.i.mask.i.i = and i256 %calldata_value.i.i, -26959946667150639794667015087019630673637144422540572481103610249216 + %comparison_result32.i.i = icmp eq i256 %shift_res.i.mask.i.i, 40953307615929575801107647705360601464619672688377251939886941387873771847680 + br i1 %comparison_result32.i.i, label %"block_rt_3/0.i.i", label %"block_rt_2/0.i.i" +} + +attributes #0 = { nounwind } +attributes #1 = { nofree noreturn null_pointer_is_valid } +attributes #2 = { noreturn nounwind } +attributes #3 = { nocallback nofree nosync nounwind readnone speculatable willreturn } +``` + +### EraVM Assembly + +The optimized LLVM IR is 
translated into the EraVM assembly below, resulting in a code size comparable to that produced by the Yul pipeline.
4, r1, r1 + jump.lt @.BB0_2 + ptr.add stack[@ptr_calldata], r0, r1 + ld r1, r1 + and @CPI0_2[0], r1, r1 + sub.s! @CPI0_3[0], r1, r1 + jump.ne @.BB0_2 + context.get_context_u128 r1 + sub! r1, r0, r1 + jump.ne @.BB0_2 + sub.s 4, r0, r1 + add stack[@calldatasize], r1, r1 + add @CPI0_4[0], r0, r2 + sub! r1, r0, r3 + add r0, r0, r3 + add.lt r2, r0, r3 + and @CPI0_4[0], r1, r1 + sub! r1, r0, r4 + add.le r0, r0, r2 + sub.s! @CPI0_4[0], r1, r1 + add r3, r0, r1 + add.eq r2, r0, r1 + sub! r1, r0, r1 + jump.ne @.BB0_2 + add 42, r0, r1 + st.1 128, r1 + add @CPI0_5[0], r0, r1 + ret.ok.to_label r1, @DEFAULT_FAR_RETURN +.BB0_1: + context.get_context_u128 r1 + sub! r1, r0, r1 + jump.ne @.BB0_2 + add 32, r0, r1 + st.2 256, r1 + st.2 288, r0 + add @CPI0_1[0], r0, r1 + ret.ok.to_label r1, @DEFAULT_FAR_RETURN +.BB0_2: + add r0, r0, r1 + ret.revert.to_label r1, @DEFAULT_FAR_REVERT +.func_end0: + + .data + .p2align 5 +calldatasize: + .cell 0 + + .p2align 5 +ptr_calldata: +.cell 0 + + .note.GNU-stack + .rodata +CPI0_0: + .cell 4294967295 +CPI0_1: + .cell 53919893334301279589334030174039261352344891250716429051063678533632 +CPI0_2: + .cell -26959946667150639794667015087019630673637144422540572481103610249216 +CPI0_3: + .cell 40953307615929575801107647705360601464619672688377251939886941387873771847680 +CPI0_4: + .cell -57896044618658097711785492504343953926634992332820282019728792003956564819968 +CPI0_5: + .cell 2535301202817642044428229017600 +``` diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/10.index.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/10.index.md new file mode 100644 index 00000000..55298158 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/10.index.md @@ -0,0 +1,48 @@ +--- +title: Instruction Reference +description: +--- + +In this specification, instructions are grouped by their relevance to the EVM instruction set: + +- [Native EVM 
instructions](instructions/evm). +- [Yul auxiliary instructions](instructions/yul). +- [EVM legacy assembly auxiliary instructions](instructions/evmla). + +Most of the EVM native instructions are represented in both Yul and EVM legacy assembly IRs. If they are not, it is +stated explicitly in the description of each instruction. + +## Addressing modes + +EraVM is a register-based virtual machine with different addressing modes. +It overrides all stack mechanics described in [the original EVM opcodes documentation](https://www.evm.codes/) including +errors they produce on EVM. + +## Solidity Intermediate Representations (IRs) + +Every instruction is translated via two IRs available in the Solidity compiler unless stated otherwise: + +1. Yul +2. EVM legacy assembly + +## Yul Extensions + +At the moment there is no way of adding zkSync-specific instructions to Yul as long as we use the official Solidity +compiler, which would produce an error on an unknown instruction. + +There are two ways of supporting such instructions: one for Solidity and one for Yul. + +### The Solidity Mode + +In Solidity we have introduced **call simulations**. They are not actual calls, as they are substituted by our Yul +translator with the needed instruction, depending on the constant address. This way the Solidity compiler is not +optimizing them out and is not emitting compilation errors. + +The reference of such extensions is coming soon. + +### The Yul Mode + +The non-call zkSync-specific instructions are only available in the Yul mode of **zksolc**. +To have better compatibility, they are implemented as `verbatim` instructions with some predefined keys. + +The reference of such extensions is coming soon. 
diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/10.index.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/10.index.md new file mode 100644 index 00000000..ca87f8d4 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/10.index.md @@ -0,0 +1,29 @@ +--- +title: Overview +description: +--- + +## Native EVM Instructions + +Such instructions are grouped into the following categories according to [the original reference](https://www.evm.codes/): + +- [Arithmetic](evm/arithmetic) +- [Logical](evm/logical) +- [Bitwise](evm/bitwise) +- [Hashes](evm/hashes) +- [Environment](evm/environment) +- [Block](evm/block) +- [Stack](evm/stack) +- [Memory](evm/memory) +- [Storage](evm/storage) +- [Events](evm/events) +- [Calls](evm/calls) +- [Create](evm/create) +- [Return](evm/return) + +### zkSync VM Assembly + +Assembly emitted for LLVM standard library functions depends on available optimizations which differ between versions. If there is no +assembly example under an instruction, compile a reproducing contract with the latest version of `zksolc`. + +zkSync VM specification contains a list of [all zkSync VM instructions (see the table of contents)](%%zk_git_repo_eravm-spec%%/spec.html). diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/arithmetic.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/arithmetic.md new file mode 100644 index 00000000..28bfedf9 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/arithmetic.md @@ -0,0 +1,369 @@ +--- +title: Arithmetic +description: +--- + +## ADD + +Original [EVM](https://www.evm.codes/#01?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +%addition_result = add i256 %value1, %value2 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L15) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#add-instruction) + +### EraVM Assembly + +```asm +add r1, r2, r1 +``` + +For more detail, see the [EraVM specification reference](%%zk_git_repo_eravm-spec%%/spec.html#AddDefinition) + +## MUL + +Original [EVM](https://www.evm.codes/#02?fork=shanghai) instruction. + +### Differences from EVM + +1. The carry is written to the 2nd output register + +### LLVM IR + +```txt +%multiplication_result = mul i256 %value1, %value2 +``` + +EraVM can output the carry of the multiplication operation. +In this case, the result is a tuple of two values: the multiplication result and the carry. +The carry is written to the 2nd output register. +The snippet below returns the carry value. + +```txt +%value1_extended = zext i256 %value1 to i512 +%value2_extended = zext i256 %value2 to i512 +%result_extended = mul nuw i512 %value1_extended, %value2_extended +%result_shifted = lshr i512 %result_extended, 256 +%result = trunc i512 %result_shifted to i256 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L53) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#mul-instruction) + +### EraVM Assembly + +```asm +mul r1, r2, r1, r2 +``` + +For more detail, see the [EraVM specification reference](%%zk_git_repo_eravm-spec%%/spec.html#MulDefinition) + +## SUB + +Original [EVM](https://www.evm.codes/#03?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +%subtraction_result = sub i256 %value1, %value2 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L34) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#sub-instruction) + +### EraVM Assembly + +```asm +sub r1, r2, r1 +``` + +For more detail, see the [EraVM specification reference](%%zk_git_repo_eravm-spec%%/spec.html#SubDefinition) + +## DIV + +Original [EVM](https://www.evm.codes/#04?fork=shanghai) instruction. + +### Differences from EVM + +1. The remainder is written to the 2nd output register + +### LLVM IR + +```text +define i256 @__div(i256 %arg1, i256 %arg2) #0 { +entry: + %is_divider_zero = icmp eq i256 %arg2, 0 + br i1 %is_divider_zero, label %return, label %division + +division: + %div_res = udiv i256 %arg1, %arg2 + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %div_res, %division ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L73) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#udiv-instruction) + +For more detail, see the [EraVM specification reference](%%zk_git_repo_eravm-spec%%/spec.html#DivDefinition) + +## SDIV + +Original [EVM](https://www.evm.codes/#05?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +define i256 @__sdiv(i256 %arg1, i256 %arg2) #0 { +entry: + %is_divider_zero = icmp eq i256 %arg2, 0 + br i1 %is_divider_zero, label %return, label %division_overflow + +division_overflow: + %is_divided_int_min = icmp eq i256 %arg1, -57896044618658097711785492504343953926634992332820282019728792003956564819968 + %is_minus_one = icmp eq i256 %arg2, -1 + %is_overflow = and i1 %is_divided_int_min, %is_minus_one + br i1 %is_overflow, label %return, label %division + +division: + %div_res = sdiv i256 %arg1, %arg2 + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %arg1, %division_overflow ], [ %div_res, %division ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L162) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#sdiv-instruction) + +EraVM does not have a similar instruction. + +## MOD + +Original [EVM](https://www.evm.codes/#06?fork=shanghai) instruction. + +### Differences from EVM + +1. The remainder is written to the 2nd output register + +### LLVM IR + +```txt +define i256 @__mod(i256 %arg1, i256 %arg2) #0 { +entry: + %is_divider_zero = icmp eq i256 %arg2, 0 + br i1 %is_divider_zero, label %return, label %remainder + +remainder: + %rem_res = urem i256 %arg1, %arg2 + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %rem_res, %remainder ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L117) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#urem-instruction) + +For more detail, see the [EraVM specification reference](%%zk_git_repo_eravm-spec%%/spec.html#DivDefinition) + +## SMOD + +Original [EVM](https://www.evm.codes/#07?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +define i256 @__smod(i256 %arg1, i256 %arg2) #0 { +entry: + %is_divider_zero = icmp eq i256 %arg2, 0 + br i1 %is_divider_zero, label %return, label %division_overflow + +division_overflow: + %is_divided_int_min = icmp eq i256 %arg1, -57896044618658097711785492504343953926634992332820282019728792003956564819968 + %is_minus_one = icmp eq i256 %arg2, -1 + %is_overflow = and i1 %is_divided_int_min, %is_minus_one + br i1 %is_overflow, label %return, label %remainder + +remainder: + %rem_res = srem i256 %arg1, %arg2 + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ 0, %division_overflow ], [ %rem_res, %remainder ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/arithmetic.rs#L236) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#srem-instruction) + +EraVM does not have a similar instruction. + +## ADDMOD + +Original [EVM](https://www.evm.codes/#08?fork=shanghai) instruction. + +### LLVM IR + +```txt +define i256 @__addmod(i256 %arg1, i256 %arg2, i256 %modulo) #0 { +entry: + %is_zero = icmp eq i256 %modulo, 0 + br i1 %is_zero, label %return, label %addmod + +addmod: + %arg1m = urem i256 %arg1, %modulo + %arg2m = urem i256 %arg2, %modulo + %res = call {i256, i1} @llvm.uadd.with.overflow.i256(i256 %arg1m, i256 %arg2m) + %sum = extractvalue {i256, i1} %res, 0 + %obit = extractvalue {i256, i1} %res, 1 + %sum.mod = urem i256 %sum, %modulo + br i1 %obit, label %overflow, label %return + +overflow: + %mod.inv = xor i256 %modulo, -1 + %sum1 = add i256 %sum, %mod.inv + %sum.ovf = add i256 %sum1, 1 + br label %return + +return: + %value = phi i256 [0, %entry], [%sum.mod, %addmod], [%sum.ovf, %overflow] + ret i256 %value +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/math.rs#L16) is common for Yul and EVMLA representations. 
+ +EraVM does not have a similar instruction. + +## MULMOD + +Original [EVM](https://www.evm.codes/#09?fork=shanghai) instruction. + +### LLVM IR + +```txt +define i256 @__mulmod(i256 %arg1, i256 %arg2, i256 %modulo) #0 { +entry: + %cccond = icmp eq i256 %modulo, 0 + br i1 %cccond, label %ccret, label %entrycont + +ccret: + ret i256 0 + +entrycont: + %arg1m = urem i256 %arg1, %modulo + %arg2m = urem i256 %arg2, %modulo + %less_then_2_128 = icmp ult i256 %modulo, 340282366920938463463374607431768211456 + br i1 %less_then_2_128, label %fast, label %slow + +fast: + %prod = mul i256 %arg1m, %arg2m + %prodm = urem i256 %prod, %modulo + ret i256 %prodm + +slow: + %arg1e = zext i256 %arg1m to i512 + %arg2e = zext i256 %arg2m to i512 + %prode = mul i512 %arg1e, %arg2e + %prodl = trunc i512 %prode to i256 + %prodeh = lshr i512 %prode, 256 + %prodh = trunc i512 %prodeh to i256 + %res = call i256 @__ulongrem(i256 %prodl, i256 %prodh, i256 %modulo) + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/math.rs#L43) is common for Yul and EVMLA representations. + +EraVM does not have a similar instruction. + +## EXP + +Original [EVM](https://www.evm.codes/#0a?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +define i256 @__exp(i256 %value, i256 %exp) "noinline-oz" #0 { +entry: + %exp_is_non_zero = icmp eq i256 %exp, 0 + br i1 %exp_is_non_zero, label %return, label %exponent_loop_body + +return: + %exp_res = phi i256 [ 1, %entry ], [ %exp_res.1, %exponent_loop_body ] + ret i256 %exp_res + +exponent_loop_body: + %exp_res.2 = phi i256 [ %exp_res.1, %exponent_loop_body ], [ 1, %entry ] + %exp_val = phi i256 [ %exp_val_halved, %exponent_loop_body ], [ %exp, %entry ] + %val_squared.1 = phi i256 [ %val_squared, %exponent_loop_body ], [ %value, %entry ] + %odd_test = and i256 %exp_val, 1 + %is_exp_odd = icmp eq i256 %odd_test, 0 + %exp_res.1.interm = select i1 %is_exp_odd, i256 1, i256 %val_squared.1 + %exp_res.1 = mul i256 %exp_res.1.interm, %exp_res.2 + %val_squared = mul i256 %val_squared.1, %val_squared.1 + %exp_val_halved = lshr i256 %exp_val, 1 + %exp_val_is_less_2 = icmp ult i256 %exp_val, 2 + br i1 %exp_val_is_less_2, label %return, label %exponent_loop_body +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/math.rs#L70) is common for Yul and EVMLA representations. + +EraVM does not have a similar instruction. + +## SIGNEXTEND + +Original [EVM](https://www.evm.codes/#0b?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +define i256 @__signextend(i256 %numbyte, i256 %value) #0 { +entry: + %is_overflow = icmp uge i256 %numbyte, 31 + br i1 %is_overflow, label %return, label %signextend + +signextend: + %numbit_byte = mul nuw nsw i256 %numbyte, 8 + %numbit = add nsw nuw i256 %numbit_byte, 7 + %numbit_inv = sub i256 256, %numbit + %signmask = shl i256 1, %numbit + %valmask = lshr i256 -1, %numbit_inv + %ext1 = shl i256 -1, %numbit + %signv = and i256 %signmask, %value + %sign = icmp ne i256 %signv, 0 + %valclean = and i256 %value, %valmask + %sext = select i1 %sign, i256 %ext1, i256 0 + %result = or i256 %sext, %valclean + br label %return + +return: + %signext_res = phi i256 [%value, %entry], [%result, %signextend] + ret i256 %signext_res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/math.rs#L93) is common for Yul and EVMLA representations. + +EraVM does not have a similar instruction. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/bitwise.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/bitwise.md new file mode 100644 index 00000000..fa533b4f --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/bitwise.md @@ -0,0 +1,218 @@ +--- +title: Bitwise +description: +--- + +## AND + +Original [EVM](https://www.evm.codes/#16?fork=shanghai) instruction. + +### LLVM IR + +```txt +%and_result = and i256 %value1, %value2 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L47) +is common for Yul and EVMLA representations. 
+ +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#and-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +and r1, r2, r1 +st.1 128, r1 +``` + +[EraVM instruction: `and`](%%zk_git_repo_eravm-spec%%/spec.html#AndDefinition) + +## OR + +Original [EVM](https://www.evm.codes/#17?fork=shanghai) instruction. + +### LLVM IR + +```txt +%or_result = or i256 %value1, %value2 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L13) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#or-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +or r1, r2, r1 +st.1 128, r1 +``` + +[EraVM instruction: `or`](%%zk_git_repo_eravm-spec%%/spec.html#OrDefinition) + +## XOR + +Original [EVM](https://www.evm.codes/#18?fork=shanghai) instruction. + +### LLVM IR + +```txt +%xor_result = xor i256 %value1, %value2 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L30) is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#xor-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +xor r1, r2, r1 +st.1 128, r1 +``` + +[EraVM instruction: `xor`](%%zk_git_repo_eravm-spec%%/spec.html#XorDefinition) + +## NOT + +Original [EVM](https://www.evm.codes/#19?fork=shanghai) instruction. + +### LLVM IR + +```txt +%xor_result = xor i256 %value, -1 +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L30) is common for Yul and EVMLA representations. 
+ +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r1, r1 +ld r1, r1 +sub.s 1, r0, r2 +xor r1, r2, r1 +st.1 128, r1 +``` + +[EraVM instruction: `xor`](%%zk_git_repo_eravm-spec%%/spec.html#XorDefinition) + +## BYTE + +Original [EVM](https://www.evm.codes/#1a?fork=shanghai) instruction. + +### LLVM IR + +```txt +define i256 @__byte(i256 %index, i256 %value) #0 { +entry: + %is_overflow = icmp ugt i256 %index, 31 + br i1 %is_overflow, label %return, label %extract_byte + +extract_byte: + %bits_offset = shl i256 %index, 3 + %value_shifted_left = shl i256 %value, %bits_offset + %value_shifted_right = lshr i256 %value_shifted_left, 248 + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %value_shifted_right, %extract_byte ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L229) is common for Yul and EVMLA representations. + +## SHL + +Original [EVM](https://www.evm.codes/#1b?fork=shanghai) instruction. + +### LLVM IR + +```txt +define i256 @__shl(i256 %shift, i256 %value) #0 { +entry: + %is_overflow = icmp ugt i256 %shift, 255 + br i1 %is_overflow, label %return, label %shift_value + +shift_value: + %shift_res = shl i256 %value, %shift + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %shift_res, %shift_value ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L67) is common for Yul and EVMLA representations. + +## SHR + +Original [EVM](https://www.evm.codes/#1c?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +define i256 @__shr(i256 %shift, i256 %value) #0 { +entry: + %is_overflow = icmp ugt i256 %shift, 255 + br i1 %is_overflow, label %return, label %shift_value + +shift_value: + %shift_res = lshr i256 %value, %shift + br label %return + +return: + %res = phi i256 [ 0, %entry ], [ %shift_res, %shift_value ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L111) is common for Yul and EVMLA representations. + +[EraVM instruction: `shr`](%%zk_git_repo_eravm-spec%%/spec.html#ShrDefinition) + +## SAR + +Original [EVM](https://www.evm.codes/#1d?fork=shanghai) instruction. + +### LLVM IR + +```txt +define i256 @__sar(i256 %shift, i256 %value) #0 { +entry: + %is_overflow = icmp ugt i256 %shift, 255 + br i1 %is_overflow, label %arith_overflow, label %shift_value + +arith_overflow: + %is_val_positive = icmp sge i256 %value, 0 + %res_overflow = select i1 %is_val_positive, i256 0, i256 -1 + br label %return + +shift_value: + %shift_res = ashr i256 %value, %shift + br label %return + +return: + %res = phi i256 [ %res_overflow, %arith_overflow ], [ %shift_res, %shift_value ] + ret i256 %res +} +``` + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/bitwise.rs#L157) is common for Yul and EVMLA representations. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/block.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/block.md new file mode 100644 index 00000000..33695804 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/block.md @@ -0,0 +1,156 @@ +--- +title: Block +description: +--- + +## BLOCKHASH + +Original [EVM](https://www.evm.codes/#40?fork=shanghai) instruction. 
+ +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L47) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## COINBASE + +Original [EVM](https://www.evm.codes/#41?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L150) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## TIMESTAMP + +Original [EVM](https://www.evm.codes/#42?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). 
+ +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L98) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## NUMBER + +Original [EVM](https://www.evm.codes/#43?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L81) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## PREVRANDAO + +Original [EVM](https://www.evm.codes/#44?fork=shanghai) instruction. + +Formerly known as DIFFICULTY: original [EVM](https://www.evm.codes/#44?fork=grayGlacier) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L133) +is common for Yul and EVMLA representations. 
+ +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## GASLIMIT + +Original [EVM](https://www.evm.codes/#45?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L13) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## CHAINID + +Original [EVM](https://www.evm.codes/#46?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L64) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## SELFBALANCE + +Original [EVM](https://www.evm.codes/#47?fork=shanghai) instruction. 
+ +Implemented as [BALANCE](environment#balance) with an [ADDRESS](environment#address) as its argument. + +## BASEFEE + +Original [EVM](https://www.evm.codes/#48?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L167) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/calls.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/calls.md new file mode 100644 index 00000000..ab1eed5e --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/calls.md @@ -0,0 +1,41 @@ +--- +title: Calls +description: +--- + +All EVM call instructions are handled similarly. + +The call type is encoded on the assembly level, so we will describe the common handling workflow, mentioning distinctions if there are any. + +For more information, see the +[zkSync Era documentation](/build/developer-reference/ethereum-differences/evm-instructions). + +## CALL + +Original [EVM](https://www.evm.codes/#f1?fork=shanghai) instruction. + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/call.rs#L530) +is common for Yul and EVMLA representations. 
+ +The code checks if the call is non-static and the Ether value is non-zero. If so, the call is redirected to the [MsgValueSimulator](/zk-stack/components/compiler/specification/system-contracts#ether-value-simulator). + +- [EraVM instruction: `call` (near call)](https://matter-labs.github.io/eravm-spec/spec.html#NearCallDefinition) +- [EraVM instruction: `far_call`](https://matter-labs.github.io/eravm-spec/spec.html#FarCalls) + +## DELEGATECALL + +Original [EVM](https://www.evm.codes/#f4?fork=shanghai) instruction. + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/call.rs#L530) +is common for Yul and EVMLA representations. + +[EraVM instruction: `far_call`](https://matter-labs.github.io/eravm-spec/spec.html#FarCalls) + +## STATICCALL + +Original [EVM](https://www.evm.codes/#fa?fork=shanghai) instruction. + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/call.rs#L530) +is common for Yul and EVMLA representations. + +[EraVM instruction: `far_call`](https://matter-labs.github.io/eravm-spec/spec.html#FarCalls) diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/create.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/create.md new file mode 100644 index 00000000..2fb65181 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/create.md @@ -0,0 +1,22 @@ +--- +title: Create +description: +--- + +The EVM CREATE instructions are handled similarly. + +For more information, see the [zkSync Era documentation](/build/developer-reference/ethereum-differences/evm-instructions#create-create2). + +## CREATE + +Original [EVM](https://www.evm.codes/#f0?fork=shanghai) instruction. 
+ +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/create.rs#L19) +is common for Yul and EVMLA representations. + +## CREATE2 + +Original [EVM](https://www.evm.codes/#f5?fork=shanghai) instruction. + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/create.rs#L57) +is common for Yul and EVMLA representations. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/environment.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/environment.md new file mode 100644 index 00000000..872424be --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/environment.md @@ -0,0 +1,332 @@ +--- +title: Environment +--- + +## ADDRESS + +Original [EVM](https://www.evm.codes/#30?fork=shanghai) instruction. + +This value is fetched with a native [EraVM instruction: `context.this`](https://matter-labs.github.io/eravm-spec/spec.html#ContextDefinitions). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L973) +is common for Yul and EVMLA representations. + +## BALANCE + +Original [EVM](https://www.evm.codes/#31?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [L2EthToken](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/L2EthToken.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/ether_gas.rs#L39) +is common for Yul and EVMLA representations. 
+ +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## ORIGIN + +Original [EVM](https://www.evm.codes/#32?fork=shanghai) instruction. + +### System Contract + +This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol). + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L47) +is common for Yul and EVMLA representations. + +The request to the System Contract is done via the +[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function. + +## CALLER + +Original [EVM](https://www.evm.codes/#33?fork=shanghai) instruction. + +This value is fetched with a native [EraVM instruction: `context.caller`](https://matter-labs.github.io/eravm-spec/spec.html#ContextDefinitions). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L974) +is common for Yul and EVMLA representations. + +## CALLVALUE + +Original [EVM](https://www.evm.codes/#34?fork=shanghai) instruction. + +This value is fetched with a native [EraVM instruction: `context.get_context_u128`](https://matter-labs.github.io/eravm-spec/spec.html#ContextDefinitions). + +### LLVM IR + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/ether_gas.rs#L25) +is common for Yul and EVMLA representations. 
+ +## CALLDATALOAD + +Original [EVM](https://www.evm.codes/#35?fork=shanghai) instruction. + +Calldata is accessed with a generic memory access instruction, but the memory chunk itself is a reference +to the calling contract's heap. +A fat pointer to the parent contract is passed via ABI using registers. + +Then, the pointer [is saved to a global stack variable](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/entry.rs#L129) +accessible from anywhere in the contract. + +### LLVM IR + +```txt +@ptr_calldata = private unnamed_addr global ptr addrspace(3) null ; global variable declaration +... +store ptr addrspace(3) %0, ptr @ptr_calldata, align 32 ; saving the pointer from `r1` to the global variable +... +%calldata_pointer = load ptr addrspace(3), ptr @ptr_calldata, align 32 ; loading the pointer from the global variable to `calldata_pointer` +%calldata_value = load i256, ptr addrspace(3) %calldata_pointer, align 32 ; loading the value from the calldata pointer +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/calldata.rs#L14) +is common for Yul and EVMLA representations. + +### EraVM Assembly + +```asm +ptr.add r1, r0, stack[@ptr_calldata] ; saving the pointer from `r1` to the global variable +... +ptr.add stack[@ptr_calldata], r0, r1 ; loading the pointer from the global variable to `r1` +ld r1, r1 ; loading the value to `r1` +``` + +- [EraVM instruction: `ptr.add`](https://matter-labs.github.io/eravm-spec/spec.html#PtrAddDefinition) +- [EraVM fat pointers](https://matter-labs.github.io/eravm-spec/spec.html#PointerDefinitions) +- [EraVM memory forwarding mechanism](https://matter-labs.github.io/eravm-spec/spec.html#MemoryForwarding) + +## CALLDATASIZE + +Original [EVM](https://www.evm.codes/#36?fork=shanghai) instruction. + +Calldata size is stored in the fat pointer passed from the parent contract (see [CALLDATALOAD](#calldataload)). 
+
+The size value can be extracted with bitwise operations as illustrated below.
+
+### LLVM IR
+
+```txt
+@calldatasize = private unnamed_addr global i256 0 ; global variable declaration
+...
+%abi_pointer_value = ptrtoint ptr addrspace(3) %0 to i256 ; converting the pointer to an integer
+%abi_pointer_value_shifted = lshr i256 %abi_pointer_value, 96 ; shifting the integer right 96 bits
+%abi_length_value = and i256 %abi_pointer_value_shifted, 4294967295 ; keeping the lowest 32 bits of the integer
+store i256 %abi_length_value, ptr @calldatasize, align 32 ; saving the value to the global variable
+```
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/calldata.rs#L40)
+is common for Yul and EVMLA representations.
+
+### EraVM Assembly
+
+```asm
+ptr.add r1, r0, stack[@ptr_calldata] ; saving the pointer from `r1` to the global variable
+shr.s 96, r1, r1 ; shifting the integer right 96 bits
+and @CPI0_0[0], r1, stack[@calldatasize] ; keeping the lowest 32 bits of the integer, saving the value to the global variable
+...
+CPI0_0:
+  .cell 4294967295
+```
+
+- [EraVM instruction: `ptr.add`](https://matter-labs.github.io/eravm-spec/spec.html#PtrAddDefinition)
+- [EraVM fat pointers](https://matter-labs.github.io/eravm-spec/spec.html#PointerDefinitions)
+- [EraVM memory forwarding mechanism](https://matter-labs.github.io/eravm-spec/spec.html#MemoryForwarding)
+
+## CALLDATACOPY
+
+Original [EVM](https://www.evm.codes/#37?fork=shanghai) instruction.
+
+Unlike on EVM, on EraVM it is a simple loop over [CALLDATALOAD](#calldataload).
+ +### LLVM IR + +```txt +; loading the pointer from the global variable to `calldata_pointer` +%calldata_pointer = load ptr addrspace(3), ptr @ptr_calldata, align 32 +; shifting the pointer by 122 bytes +%calldata_source_pointer = getelementptr i8, ptr addrspace(3) %calldata_pointer, i256 122 +; copying 64 bytes from calldata at offset 122 to the heap at offset 128 +call void @llvm.memcpy.p1.p3.i256(ptr addrspace(1) align 1 inttoptr (i256 128 to ptr addrspace(1)), ptr addrspace(3) align 1 %calldata_source_pointer, i256 64, i1 false) +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/calldata.rs#L54) +is common for Yul and EVMLA representations. + +### EraVM Assembly + +```asm +.BB0_3: + shl.s 5, r2, r3 ; shifting the offset by 32 + ptr.add r1, r3, r4 ; adding the offset to the calldata pointer + ld r4, r4 ; reading the calldata value + add 128, r3, r3 ; adding the offset to the heap pointer + st.1 r3, r4 ; writing the calldata value to the heap + add 1, r2, r2 ; incrementing the offset + sub.s! 2, r2, r3 ; checking the bounds + jump.lt @.BB0_3 ; loop continuation branching +``` + +- [EraVM instruction: `ptr.add`](https://matter-labs.github.io/eravm-spec/spec.html#PtrAddDefinition) +- [EraVM fat pointers](https://matter-labs.github.io/eravm-spec/spec.html#PointerDefinitions) +- [EraVM memory forwarding mechanism](https://matter-labs.github.io/eravm-spec/spec.html#MemoryForwarding) + +## CODECOPY + +Original [EVM](https://www.evm.codes/#38?fork=shanghai) instruction. + +See [the EraVM docs](/build/developer-reference/ethereum-differences/evm-instructions#codecopy). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L856). + +## CODESIZE + +Original [EVM](https://www.evm.codes/#39?fork=shanghai) instruction. + +See [the EraVM docs](/build/developer-reference/ethereum-differences/evm-instructions#codesize). 
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L837).
+
+## GASPRICE
+
+Original [EVM](https://www.evm.codes/#3a?fork=shanghai) instruction.
+
+### System Contract
+
+This information is requested from a System Contract called [SystemContext](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/SystemContext.sol).
+
+On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage).
+
+### LLVM IR
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/context.rs#L30)
+is common for Yul and EVMLA representations.
+
+The request to the System Contract is done via the
+[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function.
+
+## EXTCODESIZE
+
+Original [EVM](https://www.evm.codes/#3b?fork=shanghai) instruction.
+
+### System Contract
+
+This information is requested from a System Contract called [AccountCodeStorage](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/AccountCodeStorage.sol).
+
+On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage).
+
+### LLVM IR
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/ext_code.rs#L11)
+is common for Yul and EVMLA representations.
+
+The request to the System Contract is done via the
+[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function.
+
+## EXTCODECOPY
+
+Original [EVM](https://www.evm.codes/#3c?fork=shanghai) instruction.
+
+Not supported. Triggers a compile-time error.
+ +## RETURNDATASIZE + +Original [EVM](https://www.evm.codes/#3d?fork=shanghai) instruction. + +Return data size is read from the fat pointer returned from the child contract. + +The size value can be extracted with bitwise operations as illustrated below. + +### LLVM IR + +```txt +%contract_call_external = tail call { ptr addrspace(3), i1 } @__farcall(i256 0, i256 0, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef) +%contract_call_external_result_abi_data = extractvalue { ptr addrspace(3), i1 } %contract_call_external, 0 +%contract_call_memcpy_from_child_pointer_casted = ptrtoint ptr addrspace(3) %contract_call_external_result_abi_data to i256 +%contract_call_memcpy_from_child_return_data_size_shifted = lshr i256 %contract_call_memcpy_from_child_pointer_casted, 96 +%contract_call_memcpy_from_child_return_data_size_truncated = and i256 %contract_call_memcpy_from_child_return_data_size_shifted, 4294967295 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return_data.rs#L16) +is common for Yul and EVMLA representations. + +### EraVM Assembly + +```asm +near_call r0, @__farcall, @DEFAULT_UNWIND ; calling a child contract +shr.s 96, r1, r1 ; shifting the pointer value right 96 bits +and @CPI0_1[0], r1, r1 ; keeping the lowest 32 bits of the pointer value +... +CPI0_1: + .cell 4294967295 +``` + +[EraVM instruction: `call`](https://matter-labs.github.io/eravm-spec/spec.html#NearCallDefinition) + +## RETURNDATACOPY + +Original [EVM](https://www.evm.codes/#3e?fork=shanghai) instruction. + +Unlike on EVM, on EraVM it is a simple loop over memory operations on 256-bit values. 
+
+### LLVM IR
+
+```txt
+; loading the pointer from the global variable to `return_data_pointer`
+%return_data_pointer = load ptr addrspace(3), ptr @ptr_return_data, align 32
+; shifting the pointer by 122 bytes
+%return_data_source_pointer = getelementptr i8, ptr addrspace(3) %return_data_pointer, i256 122
+; copying 64 bytes from return data at offset 122 to the heap at offset 128
+call void @llvm.memcpy.p1.p3.i256(ptr addrspace(1) align 1 inttoptr (i256 128 to ptr addrspace(1)), ptr addrspace(3) align 1 %return_data_source_pointer, i256 64, i1 false)
+```
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return_data.rs#L31)
+is common for Yul and EVMLA representations.
+
+### EraVM Assembly
+
+```asm
+.BB0_3:
+  shl.s 5, r2, r3 ; shifting the offset by 32
+  ptr.add r1, r3, r4 ; adding the offset to the return data pointer
+  ld r4, r4 ; reading the return data value
+  add 128, r3, r3 ; adding the offset to the heap pointer
+  st.1 r3, r4 ; writing the return data value to the heap
+  add 1, r2, r2 ; incrementing the offset
+  sub.s! 2, r2, r3 ; checking the bounds
+  jump.lt @.BB0_3 ; loop continuation branching
+```
+
+- [EraVM instruction: `jump`](https://matter-labs.github.io/eravm-spec/spec.html#JumpDefinition)
+- [EraVM instruction predication](https://matter-labs.github.io/eravm-spec/spec.html#Predication)
+
+## EXTCODEHASH
+
+Original [EVM](https://www.evm.codes/#3f?fork=shanghai) instruction.
+
+### System Contract
+
+This information is requested from a System Contract called [AccountCodeStorage](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/AccountCodeStorage.sol).
+
+On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#environmental-data-storage).
+
+### LLVM IR
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/ext_code.rs#L29)
+is common for Yul and EVMLA representations.
+
+The request to the System Contract is done via the
+[SystemRequest](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/context/function/runtime/system_request.rs) runtime function.
diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/events.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/events.md
new file mode 100644
index 00000000..4fd07e65
--- /dev/null
+++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/events.md
@@ -0,0 +1,29 @@
+---
+title: Event
+description:
+---
+
+The EraVM event instructions are more low-level. Each `LOG`-like instruction is unrolled into a loop, where each iteration writes two 256-bit words.
+
+The words must contain data in the following order:
+
+1. The initializer cell, describing the number of indexed words (e.g. `I`) and the size of non-indexed data in bytes (e.g. `D`)
+2. `I` indexed 32-byte words
+3. `D` bytes of data
+
+Each write operation can contain some subsequent data from its next step. If only one word remains, the second input is zero.
+
+See [EraVM instruction: `log.event`](https://matter-labs.github.io/eravm-spec/spec.html#EventDefinition)
+
+## LOG0 - LOG4
+
+Original [EVM](https://www.evm.codes/#a0?fork=shanghai) instructions.
+
+### System Contract
+
+This information is requested from a System Contract called [EventWriter](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/EventWriter.yul).
+
+On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#event-handler).
+ +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/event.rs#L20) +is common for Yul and EVMLA representations. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/hashes.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/hashes.md new file mode 100644 index 00000000..610e047f --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/hashes.md @@ -0,0 +1,54 @@ +--- +title: Hashes +description: +--- + +## SHA3 + +Original [EVM](https://www.evm.codes/#20?fork=shanghai) instruction. + +### System Contract + +This instruction is handled by a System Contract called [Keccak256](https://github.com/matter-labs/era-system-contracts/blob/main/contracts/precompiles/Keccak256.yul), +which is a wrapper around the EraVM precompile. + +On how the System Contract is called, see [this section](/zk-stack/components/compiler/specification/system-contracts#keccak256-hash-function). 
+ +### LLVM IR + +```txt +define i256 @__sha3(i8 addrspace(1)* nocapture nofree noundef %0, i256 %1, i1 %throw_at_failure) "noinline-oz" #1 personality i32()* @__personality { +entry: + %addr_int = ptrtoint i8 addrspace(1)* %0 to i256 + %2 = tail call i256 @llvm.umin.i256(i256 %addr_int, i256 4294967295) + %3 = tail call i256 @llvm.umin.i256(i256 %1, i256 4294967295) + %gas_left = tail call i256 @llvm.syncvm.gasleft() + %4 = tail call i256 @llvm.umin.i256(i256 %gas_left, i256 4294967295) + %abi_data_input_offset_shifted = shl nuw nsw i256 %2, 64 + %abi_data_input_length_shifted = shl nuw nsw i256 %3, 96 + %abi_data_gas_shifted = shl nuw nsw i256 %4, 192 + %abi_data_offset_and_length = add i256 %abi_data_input_length_shifted, %abi_data_input_offset_shifted + %abi_data_add_gas = add i256 %abi_data_gas_shifted, %abi_data_offset_and_length + %abi_data_add_system_call_marker = add i256 %abi_data_add_gas, 904625697166532776746648320380374280103671755200316906558262375061821325312 + %call_external = tail call { i8 addrspace(3)*, i1 } @__staticcall(i256 %abi_data_add_system_call_marker, i256 32784, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef, i256 undef) + %status_code = extractvalue { i8 addrspace(3)*, i1 } %call_external, 1 + br i1 %status_code, label %success_block, label %failure_block + +success_block: + %abi_data_pointer = extractvalue { i8 addrspace(3)*, i1 } %call_external, 0 + %data_pointer = bitcast i8 addrspace(3)* %abi_data_pointer to i256 addrspace(3)* + %keccak256_child_data = load i256, i256 addrspace(3)* %data_pointer, align 1 + ret i256 %keccak256_child_data + +failure_block: + br i1 %throw_at_failure, label %throw_block, label %revert_block + +revert_block: + call void @__revert(i256 0, i256 0, i256 0) + unreachable + +throw_block: + call void @__cxa_throw(i8* noalias nocapture nofree align 32 null, i8* noalias nocapture nofree align 32 undef, i8* noalias nocapture nofree align 32 undef) + 
unreachable +} +``` diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/logical.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/logical.md new file mode 100644 index 00000000..a0c3873c --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/logical.md @@ -0,0 +1,205 @@ +--- +title: Logical +description: +--- + +## LT + +Original [EVM](https://www.evm.codes/#10?fork=shanghai) instruction. + +### LLVM IR + +```txt +%comparison_result = icmp ult i256 %value1, %value2 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +sub! r1, r2, r1 +add 0, r0, r1 +add.lt 1, r0, r1 +st.1 128, r1 +``` + +## GT + +Original [EVM](https://www.evm.codes/#11?fork=shanghai) instruction. + +### LLVM IR + +```txt +%comparison_result = icmp ugt i256 %value1, %value2 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +sub! r1, r2, r1 +add 0, r0, r1 +add.gt 1, r0, r1 +st.1 128, r1 +``` + +## SLT + +Original [EVM](https://www.evm.codes/#12?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +%comparison_result = icmp slt i256 %value1, %value2 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +add @CPI0_4[0], r0, r3 +sub! r1, r2, r4 +add r0, r0, r4 +add.lt r3, r0, r4 +and @CPI0_4[0], r2, r2 +and @CPI0_4[0], r1, r1 +sub! r1, r2, r5 +add.le r0, r0, r3 +xor r1, r2, r1 +sub.s! @CPI0_4[0], r1, r1 +add r4, r0, r1 +add.eq r3, r0, r1 +sub! r1, r0, r1 +add 0, r0, r1 +add.ne 1, r0, r1 +st.1 128, r1 +``` + +## SGT + +Original [EVM](https://www.evm.codes/#13?fork=shanghai) instruction. + +### LLVM IR + +```txt +%comparison_result = icmp sgt i256 %value1, %value2 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +add @CPI0_4[0], r0, r3 +sub! r1, r2, r4 +add r0, r0, r4 +add.gt r3, r0, r4 +and @CPI0_4[0], r2, r2 +and @CPI0_4[0], r1, r1 +sub! r1, r2, r5 +add.ge r0, r0, r3 +xor r1, r2, r1 +sub.s! @CPI0_4[0], r1, r1 +add r4, r0, r1 +add.eq r3, r0, r1 +sub! r1, r0, r1 +add 0, r0, r1 +add.ne 1, r0, r1 +st.1 128, r1 +``` + +## EQ + +Original [EVM](https://www.evm.codes/#14?fork=shanghai) instruction. 
+ +### LLVM IR + +```txt +%comparison_result = icmp eq i256 %value1, %value2 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r0, r1 +ptr.add.s 36, r1, r2 +ld r2, r2 +ptr.add.s 4, r1, r1 +ld r1, r1 +sub! r1, r2, r1 +add 0, r0, r1 +add.eq 1, r0, r1 +st.1 128, r1 +``` + +## ISZERO + +Original [EVM](https://www.evm.codes/#15?fork=shanghai) instruction. + +### LLVM IR + +```txt +%comparison_result = icmp eq i256 %value, 0 +%comparison_result_extended = zext i1 %comparison_result to i256 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/comparison.rs#L15) +is common for Yul and EVMLA representations. + +[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#icmp-instruction) + +### EraVM Assembly + +```asm +ptr.add stack[@ptr_calldata], r1, r1 +ld r1, r1 +sub! r1, r0, r1 +add 0, r0, r1 +add.eq 1, r0, r1 +st.1 128, r1 +``` diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/memory.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/memory.md new file mode 100644 index 00000000..1642fc5d --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/memory.md @@ -0,0 +1,76 @@ +--- +title: Memory +description: +--- + +## MLOAD + +Original [EVM](https://www.evm.codes/#51?fork=shanghai) instruction. + +Heap memory load operation is modeled with a native EraVM instruction. 
+
+### LLVM IR
+
+```txt
+%value = load i256, ptr addrspace(1) %pointer, align 1
+```
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/memory.rs#L15)
+is common for Yul and EVMLA representations.
+
+[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#load-instruction)
+
+### EraVM Assembly
+
+```asm
+ld.1 r1, r2
+```
+
+See [EraVM instruction: `ld.1`](https://matter-labs.github.io/eravm-spec/spec.html#LoadDefinition)
+
+## MSTORE
+
+Original [EVM](https://www.evm.codes/#52?fork=shanghai) instruction.
+
+Heap memory store operation is modeled with a native EraVM instruction.
+
+### LLVM IR
+
+```txt
+store i256 128, ptr addrspace(1) inttoptr (i256 64 to ptr addrspace(1)), align 1
+```
+
+[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/memory.rs#L38)
+is common for Yul and EVMLA representations.
+
+[LLVM IR instruction documentation](https://releases.llvm.org/15.0.0/docs/LangRef.html#store-instruction)
+
+### EraVM Assembly
+
+```asm
+st.1 r1, r2
+```
+
+See [EraVM instruction: `st.1`](https://matter-labs.github.io/eravm-spec/spec.html#StoreDefinition)
+
+## MSTORE8
+
+Original [EVM](https://www.evm.codes/#53?fork=shanghai) instruction.
+ +### LLVM IR + +```txt +define void @__mstore8(i256 addrspace(1)* nocapture nofree noundef dereferenceable(32) %addr, i256 %val) #2 { +entry: + %orig_value = load i256, i256 addrspace(1)* %addr, align 1 + %orig_value_shifted_left = shl i256 %orig_value, 8 + %orig_value_shifted_right = lshr i256 %orig_value_shifted_left, 8 + %byte_value_shifted = shl i256 %val, 248 + %store_result = or i256 %orig_value_shifted_right, %byte_value_shifted + store i256 %store_result, i256 addrspace(1)* %addr, align 1 + ret void +} +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/memory.rs#L62) +is common for Yul and EVMLA representations. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/return.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/return.md new file mode 100644 index 00000000..464ae579 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/return.md @@ -0,0 +1,78 @@ +--- +title: Return +description: +--- + +## STOP + +Original [EVM](https://www.evm.codes/#00?fork=shanghai) instruction. + +This instruction is a [RETURN](#return) with an empty data payload. + +### LLVM IR + +The same as for [RETURN](#return). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return.rs#L103) +is common for Yul and EVMLA representations. + +## RETURN + +Original [EVM](https://www.evm.codes/#f3?fork=shanghai) instruction. + +This instruction works differently in deploy code. For more information, see [the zkSync Era documentation](/build/developer-reference/ethereum-differences/evm-instructions#return-stop). 
+ +### LLVM IR + +```txt +define void @__return(i256 %0, i256 %1, i256 %2) "noinline-oz" #5 personality i32()* @__personality { +entry: + %abi = call i256@__aux_pack_abi(i256 %0, i256 %1, i256 %2) + tail call void @llvm.syncvm.return(i256 %abi) + unreachable +} +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return.rs#L16) +is common for Yul and EVMLA representations. + +## REVERT + +Original [EVM](https://www.evm.codes/#fd?fork=shanghai) instruction. + +### LLVM IR + +```txt +define void @__revert(i256 %0, i256 %1, i256 %2) "noinline-oz" #5 personality i32()* @__personality { +entry: + %abi = call i256@__aux_pack_abi(i256 %0, i256 %1, i256 %2) + tail call void @llvm.syncvm.revert(i256 %abi) + unreachable +} +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return.rs#L86) +is common for Yul and EVMLA representations. + +### EraVM + +See also EraVM instruction `revert`: [when returning from near calls](https://matter-labs.github.io/eravm-spec/spec.html#NearRevertDefinition) +and [when returning from far calls](https://matter-labs.github.io/eravm-spec/spec.html#FarRevertDefinition). + +## INVALID + +Original [EVM](https://www.evm.codes/#fe?fork=shanghai) instruction. + +This instruction is a [REVERT](#revert) with an empty data payload, but it also burns all the available gas. + +### LLVM IR + +The same as for [REVERT](#revert). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/return.rs#L115) +is common for Yul and EVMLA representations. + +### EraVM + +See also EraVM instruction `revert`: [when returning from near calls](https://matter-labs.github.io/eravm-spec/spec.html#NearRevertDefinition) +and [when returning from far calls](https://matter-labs.github.io/eravm-spec/spec.html#FarRevertDefinition). 
diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/stack.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/stack.md new file mode 100644 index 00000000..ee2f665e --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/stack.md @@ -0,0 +1,56 @@ +--- +title: Stack +description: +--- + +## POP + +Original [EVM](https://www.evm.codes/#50?fork=shanghai) instruction. + +In Yul, only used to mark unused values, and is not translated to LLVM IR. + +```solidity +pop(staticcall(gas(), address(), 0, 64, 0, 32)) +``` + +For EVMLA, see [EVM Legacy Assembly Translator](/zk-stack/components/compiler/specification/evmla-translator). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/assembly/instruction/stack.rs#L108). + +## JUMPDEST + +Original [EVM](https://www.evm.codes/#5b?fork=shanghai) instruction. + +Is not available in Yul. + +Ignored in EVMLA. See [EVM Legacy Assembly Translator](/zk-stack/components/compiler/specification/evmla-translator) for more information. + +## PUSH - PUSH32 + +Original [EVM](https://www.evm.codes/#5f?fork=shanghai) instructions. + +Is not available in Yul. + +For EVMLA, see [EVM Legacy Assembly Translator](/zk-stack/components/compiler/specification/evmla-translator). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/assembly/instruction/stack.rs#L10). + +## DUP1 - DUP16 + +Original [EVM](https://www.evm.codes/#80?fork=shanghai) instructions. + +Is not available in Yul. + +For EVMLA, see [EVM Legacy Assembly Translator](/zk-stack/components/compiler/specification/evmla-translator). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/assembly/instruction/stack.rs#L48). 
+ +## SWAP1 - SWAP16 + +Original [EVM](https://www.evm.codes/#90?fork=shanghai) instructions. + +Is not available in Yul. + +For EVMLA, see [EVM Legacy Assembly Translator](/zk-stack/components/compiler/specification/evmla-translator). + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-solidity/blob/main/src/evmla/assembly/instruction/stack.rs#L74). diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/storage.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/storage.md new file mode 100644 index 00000000..77f0e1c0 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/20.evm/storage.md @@ -0,0 +1,46 @@ +--- +title: Storage +description: +--- + +## SLOAD + +Original [EVM](https://www.evm.codes/#54?fork=shanghai) instruction. + +Storage load operation is modeled with a native EraVM instruction. + +### LLVM IR + +```txt +%value = load i256, ptr addrspace(5) %pointer, align 1 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/storage.rs#L13) +is common for Yul and EVMLA representations. + +### EraVM Assembly + +```asm +sload r1, r2 +``` + +## SSTORE + +Original [EVM](https://www.evm.codes/#55?fork=shanghai) instruction. + +Storage store operation is modeled with a native EraVM instruction. + +### LLVM IR + +```txt +store i256 42, ptr addrspace(5) inttoptr (i256 1 to ptr addrspace(5)), align 1 +``` + +[The LLVM IR generator code](https://github.com/matter-labs/era-compiler-llvm-context/blob/main/src/eravm/evm/storage.rs#L34) +is common for Yul and EVMLA representations. 
+ +### EraVM Assembly + +```asm +sstore r1, r2 +``` diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/30.evmla.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/30.evmla.md new file mode 100644 index 00000000..a65b048b --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/30.evmla.md @@ -0,0 +1,89 @@ +--- +title: EVM Legacy Assembly +description: +--- + +These instructions do not have a direct representation in EVM or zkSync VM. Instead, they perform auxiliary operations +required for generating the target bytecode. + +## PUSH [$] + +The same as [datasize](/zk-stack/components/compiler/specification/instructions/yul#datasize). + +LLVM IR codegen references: + +<!-- TODO: the Shared FE code link does not link anywhere --> +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L144) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/create.rs#L149) + +## PUSH #[$] + +The same as [dataoffset](/zk-stack/components/compiler/specification/instructions/yul#dataoffset). + +LLVM IR codegen references: +<!-- TODO: the Shared FE code link does not link anywhere --> +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L135) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/create.rs#L97) + +## ASSIGNIMMUTABLE + +The same as [setimmutable](/zk-stack/components/compiler/specification/instructions/yul#setimmutable). + +For more information, see the +[Differences with Ethereum](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable). + +LLVM IR codegen references: +<!-- TODO: the Shared FE code link does not link anywhere --> +1. 
[zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L760) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/immutable.rs#L79) + +## PUSHIMMUTABLE + +The same as [loadimmutable](/zk-stack/components/compiler/specification/instructions/yul#loadimmutable). + +For more information, see the +[Differences with Ethereum](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable). + +LLVM IR codegen references: +<!-- TODO: the Shared FE code link does not link anywhere --> +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L747) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/immutable.rs#L17) + +## PUSHLIB + +The same as [linkersymbol](/zk-stack/components/compiler/specification/instructions/yul#linkersymbol). + +For more information, see the +[Differences with Ethereum](/build/developer-reference/ethereum-differences/libraries). + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L956). + +## PUSHDEPLOYADDRESS + +Returns the address the contract is deployed to. + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L956). + +## PUSHSIZE + +Can be only found in deploy code. On EVM, returns the total size of the runtime code and constructor arguments. + +On zkSync VM, it is always 0, since zkSync VM does not operate on runtime code in deploy code. + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L907). + +## PUSH data + +Pushes a data chunk onto the stack. Data chunks are resolved during the processing of input assembly JSON. 
+ +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/ethereal_ir/function/block/element/mod.rs#L164). + +## PUSH [tag] + +Pushes an EVM Legacy Assembly destination block identifier onto the stack. + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/evmla/assembly/instruction/stack.rs#L31). + +## Tag + +Starts a new EVM Legacy Assembly block. Tags are processed during the translation of EVM Legacy Assembly into EthIR. diff --git a/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/40.yul.md b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/40.yul.md new file mode 100644 index 00000000..c84e951c --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/20.specification/60.instructions/40.yul.md @@ -0,0 +1,104 @@ +--- +title: Yul +description: +--- + +These instructions do not have a direct representation in EVM or zkSync VM. Instead, they perform auxiliary operations +required for generating the target bytecode. + +## datasize + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#datasize-dataoffset-datacopy) auxiliary instruction. + +Unlike on EVM, on zkSync VM target this instruction returns the size of the header part of the calldata sent to the +[ContractDeployer](/zk-stack/components/compiler/specification/system-contracts#contract-deployer). +For more information, see [CREATE](/zk-stack/components/compiler/specification/instructions/evm/create). + +LLVM IR codegen references: + +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L928) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/create.rs#L149) + +## dataoffset + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#datasize-dataoffset-datacopy) auxiliary instruction. 
+ +Unlike on EVM, on zkSync VM target this instruction has nothing to do with the offset. Instead, it returns the bytecode hash +of the contract referenced by the Yul object identifier. Since our compiler translates instructions without analyzing +the surrounding context, it is not possible to get the bytecode hash from anywhere else in [datacopy](#datacopy). For +more information, see [CREATE](/zk-stack/components/compiler/specification/instructions/evm/create). + +LLVM IR codegen references: + +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L918) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/zkevm/evm/create.rs#L97) + +## datacopy + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#datasize-dataoffset-datacopy) auxiliary instruction. + +Unlike on EVM, on zkSync VM target this instruction copies the bytecode hash passed as [dataoffset](#dataoffset) to the +destination. For more information, see [CREATE](/zk-stack/components/compiler/specification/instructions/evm/create). + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L938). + +## setimmutable + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#setimmutable-loadimmutable) auxiliary instruction. + +Writes immutables to the auxiliary heap. + +For more information, see the [Differences with Ethereum](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable). + +LLVM IR codegen references: + +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L562) +2. 
[Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/immutable.rs#L79) + +## loadimmutable + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#setimmutable-loadimmutable) auxiliary instruction. + +Reads immutables from the [ImmutableSimulator](/zk-stack/components/compiler/specification/system-contracts#simulator-of-immutables). + +For more information, see the +[Differences with Ethereum](/build/developer-reference/ethereum-differences/evm-instructions#setimmutable-loadimmutable). + +LLVM IR codegen references: + +1. [zksolc compiler](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L540) +2. [Shared FE code](%%zk_git_repo_era-compiler-llvm-context%%/blob/main/src/eravm/evm/immutable.rs#L17) + +## linkersymbol + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#linkersymbol) auxiliary instruction. + +Returns the address of a deployable library. The address must be passed to `zksolc` with the `--libraries` option, +otherwise a compile-time error will be produced. + +There is a special `zksolc` execution mode that can be enabled with `--missing-libraries` flag. In this mode, the +compiler will return the list of deployable libraries not provided with `--libraries`. This mode allows package managers +like Hardhat to automatically deploy libraries. + +For more information, see the +[Differences with Ethereum](/build/developer-reference/ethereum-differences/libraries). + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L956). + +## memoryguard + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#memoryguard) auxiliary instruction. + +Is a Yul optimizer hint which is not used by our compiler. Instead, its only argument is simply unwrapped and returned. 
+ +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/mod.rs#L968). + +## verbatim + +Original [Yul](https://docs.soliditylang.org/en/latest/yul.html#verbatim) auxiliary instruction. + +Unlike on EVM, on zkSync VM target this instruction has nothing to do with inserting of EVM bytecode. Instead, it is used to implement +[zkSync VM Yul Extensions](/zk-stack/components/compiler/specification/instructions#yul-extensions) available in the system mode. +In order to compile a Yul contract with extensions, both Yul and system mode must be enabled (`zksolc --yul --system-mode ...`). + +[The LLVM IR generator code](%%zk_git_repo_era-compiler-solidity%%/blob/main/src/yul/parser/statement/expression/function_call/verbatim.rs). diff --git a/content/10.zk-stack/10.components/70.compiler/_dir.yml b/content/10.zk-stack/10.components/70.compiler/_dir.yml new file mode 100644 index 00000000..0e890cb2 --- /dev/null +++ b/content/10.zk-stack/10.components/70.compiler/_dir.yml @@ -0,0 +1 @@ +title: Compiler diff --git a/content/10.zk-stack/10.components/80.fee-withdrawer.md b/content/10.zk-stack/10.components/80.fee-withdrawer.md new file mode 100644 index 00000000..42cac003 --- /dev/null +++ b/content/10.zk-stack/10.components/80.fee-withdrawer.md @@ -0,0 +1,9 @@ +--- +title: Fee Withdrawer +description: Learn about the Fee Withdrawer, a tool that automates the transfer of collected fees from a ZK Chain to a base layer address. +--- + +[The Fee Withdrawer](https://github.com/matter-labs/era-fee-withdrawer) +is a specialized tool that automates the process of transferring collected fees from a ZK Chain to a specified address on the base layer. +This functionality is crucial for maintaining the continuous operation of the ETH operator on the base layer, +ensuring there is always a sufficient supply of the gas token available for transactions. 
diff --git a/content/10.zk-stack/10.components/90.portal-wallet-bridge.md b/content/10.zk-stack/10.components/90.portal-wallet-bridge.md new file mode 100644 index 00000000..e56a8233 --- /dev/null +++ b/content/10.zk-stack/10.components/90.portal-wallet-bridge.md @@ -0,0 +1,17 @@ +--- +title: Portal - Wallet + Bridge +description: Discover how the Portal dApp facilitates interaction with your ZK Chain, including asset bridging, transaction tracking, and contract management. +--- + +[The Portal](https://github.com/matter-labs/dapp-portal) is a decentralized application (dApp) designed to enhance interaction with your ZK Chain. +It serves as a versatile tool for both you and your users, simplifying various operations within the blockchain environment. + +### Key Features + +- **Bridging Assets:** The Portal enables the movement of assets between the Layer 1 (L1) network and your ZK Chain, facilitating smooth asset transfers. +- **Internal Transactions:** Users can send assets within the ZK Chain efficiently, utilizing the Portal's user-friendly interface. +- **Transaction History:** The Portal provides access to view and verify historical transactions, enhancing transparency and user trust in the platform. +- **Contract Management:** It supports users in managing smart contracts, including deployment and interaction functionalities. + +Enhancing the Portal's capabilities is possible by integrating it with the [Block Explorer Indexer/API](block-explorer), +which provides additional data and analytics support, further enriching the user experience. 
diff --git a/content/10.zk-stack/10.components/_dir.yml b/content/10.zk-stack/10.components/_dir.yml new file mode 100644 index 00000000..9937383b --- /dev/null +++ b/content/10.zk-stack/10.components/_dir.yml @@ -0,0 +1 @@ +title: Components diff --git a/content/10.zk-stack/20.running-a-zk-chain/10.locally.md b/content/10.zk-stack/20.running-a-zk-chain/10.locally.md new file mode 100644 index 00000000..6fc06c34 --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/10.locally.md @@ -0,0 +1,245 @@ +--- +title: Locally +description: +--- + +## Getting Started with ZK Stack + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +A new and improved ZK Stack CLI is coming very soon. The current version is deprecated and may not function as expected. +:: + +## Development dependencies + +Ensure you have followed [these instructions](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/setup-dev.md) +to set up dependencies on your machine (don't worry about the Environment section for now). + +## Deploying locally + +1. Clone the zksync-era repo (or pull the latest if you've already cloned it) and go to the root of it: + + ```bash + git clone https://github.com/matter-labs/zksync-era + ``` + +1. Add `ZKSYNC_HOME` to your path (e.g. `~/.bash_profile`, `~/.zshrc` ) - don't forget to source your profile file again (or restart your terminal): + + ```bash + export ZKSYNC_HOME=/path/to/zksync/repo/you/cloned + export PATH=$ZKSYNC_HOME/bin:$PATH + ``` + +1. Build latest version of zk tools by just running `zk` on the root of the project. + + ```bash + zk + ``` + +1. Last, start the wizard and follow instructions to set up and deploy your new ZK Chain by running `zk stack init` + + - Initially you want to `Configure new chain` + + - Give it a name and chain id. + + - Select localhost (default `matterlabs/geth`) and follow the wizard. 
+ + - If you are doing this for the first time, several components need to be compiled/built, so do not worry if it takes a few minutes. + The console will show what is going on anyways. + + - If you don't want to configure any values for now and just want to check the build process for a ZK Chain, try out the `zk stack demo` command. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +The commands above are not just running docker containers, but are actually building the code from the repo to spin up your ZK Chain. +For this reason the process might take some time. +If you just want to run docker containers to play around with a zkSync chain, you can use `zksync-cli dev`. +Learn more [here](/build/tooling/zksync-cli). +:: + +### Your ZK Chain is now deployed + +Your ZK Chain is now deployed to the base chain (most likely a local geth docker container) and configured. +You can find all configuration in a new `.env` file created on `<project root>/etc/env/<your_chain_name_slug>.env`, +and if you deployed test tokens, their addresses will be available at `<project root>/etc/tokens/<the_l1_identifier>.json` + +1. The wizard allows you to run the server in the end. If you chose not to, you’re still able to run it by executing + + ```bash + zk server --components "http_api,eth,state_keeper,housekeeper" + ``` + +1. You can now run transactions and start playing with your ZK Chain by using the RPC available at <http://localhost:3050>. + + - Don't forget to deposit some ETH and fund your accounts on your ZK Chain. To do so follow the instructions for [Funding accounts](#funding-accounts). + +## Using your ZK Chain + +### Funding accounts + +During the `zk stack init` configurator, you have a choice of what base layer to deploy the ZK Chain onto: +the local geth node, or an Ethereum network (e.g., Sepolia). +The first step to start interacting with your ZK Chain is to fund an account (or a few). +This means you need some funds on the base layer.
+ +#### Base layer is the local geth node +[@matterlabs/geth:latest](https://hub.docker.com/r/matterlabs/geth) + +- If you chose to deploy on local geth node, you will have a set of addresses that have 100 ETH each. + You can find the list [here](https://github.com/matter-labs/local-setup/blob/main/rich-wallets.json) + and use these addresses to deposit into your ZK Chain via the bridge. + +#### Base layer is an Ethereum network (e.g., Sepolia) + +- If you chose to deploy on an Ethereum network (e.g., Sepolia), you need to have an account on the base layer with ETH. + You can use the deployer, governor, or operator wallets set up during the deployment process, + or any other one you have funds in, to deposit into your ZK Chain via the bridge. + +Once you have the accounts with funds on the L1 base layer, you can do a deposit via the bridge to your ZK Chain, +and any further interactions with your ZK Chain. + +### Using your ZK Chain RPC + +Your server contains both HTTPS as well as WebSocket (WS) services that are fully web3 compatible (and contain some extra ZK Stack functionalities). +Learn more about it [here](/build/api-reference). + +### Using zksync-cli + +zkSync CLI allows you to easily interact and develop applications on your ZK Chain. +When executing any command with zksync-cli, you can specify RPC urls for both L1 and L2. +Your local server contains RPCs for both. +An example deposit command via the bridge would look like: + +```bash +npx zksync-cli bridge deposit --rpc=http://localhost:3050 --l1-rpc=http://localhost:8545 +``` + +### Using Portal + +The [dApp Portal](https://github.com/matter-labs/dapp-portal) module allows you to: + +- View balances, transfer and bridge tokens to your ZK Chain. +- Add contacts for quick and easy access. + +You can run the Portal module locally, and point it to your ZK Chain configuration. It comes with scripts that help with +pulling the ZK Chain configuration from your zksync-era repo and adapting to portal needs.
Learn more +[here](https://github.com/matter-labs/dapp-portal). An example command would look like: + +```bash +npm run hyperchain:configure ../zksync-era +npm run dev:node:hyperchain +``` + +You can now navigate to the displayed Portal URL (typically <http://localhost:3000>). + +### Using Block Explorer + +A [free open source block explorer](https://github.com/matter-labs/block-explorer) is available for your ZK Chain. Block explorer contains three components + +- [Worker](https://github.com/matter-labs/block-explorer/tree/main/packages/worker) +- [API](https://github.com/matter-labs/block-explorer/tree/main/packages/api) +- [App](https://github.com/matter-labs/block-explorer/tree/main/packages/app) + +Which you can run all together locally and connect to your ZK Chain. + +Make sure you have your [zksync-era](https://github.com/matter-labs/zksync-era) repo set up locally and +the `zk server` is running. The wizard in this guide allows you to run the server in the end. If you chose not to, you’re still able to run it by executing: + +```bash +zk server --components "http_api,eth,state_keeper,housekeeper" +``` + +### Running block explorer locally + +#### Install block explorer + +Clone & install the block explorer repository: + +```bash +cd /path/to/where/you/clone/repos +git clone https://github.com/matter-labs/block-explorer.git +cd block-explorer +npm install +``` + +#### Setting up env variables + +Next you need to set up all the necessary environment and configuration files with your ZK Chain settings. You can use a script to set them up: + +```bash +npm run hyperchain:configure +``` + +#### Run block explorer + +Afterwards you can run the block explorer: + +```bash +# if you are running block explorer for the first time +npm run db:create +``` + +```bash +npm run dev +``` + +#### Verify block explorer is up and running + +By default, you can access front-end `App` at <http://localhost:3010> in your browser. 
`API` should +be available at <http://localhost:3020>, `Worker` at <http://localhost:3001> and `Data Fetcher` at <http://localhost:3040>. + +## Enabling Boojum prover + +With the default configuration, your ZK Chain is not running a prover, and has a DummyExecutor contract, +which mainly “accepts” that a batch is executed without proof. This enables you to test it with much lower hardware requirements. + +To enable the prover, run the `zk stack prover-setup` command. It will guide you through the necessary configuration. + +There are two options for running the Boojum prover: in GPU, or in CPU. + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +**Running a prover is not required** for deploying a testnet. The requirements below are only necessary if you want to enable the prover. +:: + +### Requirements for GPU Prover + +The docker compose file assumes you will be running all components in the same machine. The current minimum requirements for a low TPS scenario are: + +- 6 GB VRAM NVIDIA GPU +- 16 Core CPU +- 64 GB of RAM +- 300 GB of Disk Space (SSD preferred) + +### Requirements for CPU Prover + +The docker compose file assumes you will be running all components in the same machine. +The current minimum requirements for a low TPS scenario are: + +- 32 Core CPU +- 128 GB of RAM +- 700 GB of Disk Space (SSD preferred) + +## Addendum + +- If you make changes to any contract, you can always deploy a new ZK Chain to easily test those changes. +- If you configure your ZK Chain once, you don't need to do it again as the wizard allows you to use an existing config file. +- For now, it is only possible to deploy a ZK Chain as an L2, but soon it will also work as L3s. +- When running the default matterlabs/geth, you have a set of rich wallets available to you.
+ ::drop-panel + ::panel{label="Rich Wallets"} + :display-partial{path="/_partials/_rich-wallets"} + :: + :: +- If you face an issue compiling rust code (example `<jemalloc>: Error allocating TSD`) try removing the `rust-toolchain` file from the repo. +- If you want to have a custom local base chain, you must ensure you have a database for your ZK Chain, as well as the local RPC for your L1. + - To run a Postgres 14 database for your ZK Chain, execute the following: + + ```bash + docker-compose -f docker-compose-zkstack-common.yml up -d postgres + ``` + +In case you don't want to use the docker Postgres database above but another one you already have locally, +make sure its version is 14 and it is running and accepts connections at `postgres://postgres@localhost/zksync_local`. +You can test with: + +```bash +psql -h localhost -U postgres -d postgres -c 'SELECT 1;' +``` diff --git a/content/10.zk-stack/20.running-a-zk-chain/20.production.md b/content/10.zk-stack/20.running-a-zk-chain/20.production.md new file mode 100644 index 00000000..8a9d810c --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/20.production.md @@ -0,0 +1,25 @@ +--- +title: In Production +--- + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +A new and improved ZK Stack CLI is coming very soon. The current version is deprecated and may not function as expected. +:: + +## Deploying to a non-local environment + +The process to deploy to a non local environment is pretty similar to the local one. +The wizard allows you to set up URLs to external services (database, RPCs, etc). + +## Database + +The wizard allows you to provide a custom database url connector. +Make sure you provide it and that it accepts external connections if your server is not running in the same private network. + +## Server (Sequencer) & Prover + +After configuring your ZK Chain, you can generate docker images for your server and prover. +To do that run `zk stack docker-setup`. 
+ +This command will guide you to properly name and tag your image. +After building it, a docker compose file will be available so you can run the images on whichever cloud environment you desire. diff --git a/content/10.zk-stack/20.running-a-zk-chain/30.raas.md b/content/10.zk-stack/20.running-a-zk-chain/30.raas.md new file mode 100644 index 00000000..cc142ef5 --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/30.raas.md @@ -0,0 +1,30 @@ +--- +title: Rollup as a Service +description: +--- + +::callout{icon="i-heroicons-exclamation-triangle" color="amber"} +ZK Stack is still under development. We advise you to only use it for local and testnet deployments. +:: + +## Deploying and running using a Rollup as a Service provider + +Looking to deploy a ZK Stack chain but worried about complexities? +RaaS providers are here to simplify the process! +Providers offer scalable and secure nodes, and may provide quick and user-friendly interfaces, +allowing you to deploy your ZK Stack chain with ease and efficiency. +Experience the seamless integration of advanced blockchain technology without the hassle. +Get started today and revolutionize your product with the power of RaaS and ZK Stack! + +Use RaaS to improve scalability, reduce costs, access specialized services, speed up development, enhance interoperability, +and maintain flexibility in an ever-evolving technological landscape.
+ +The list of RaaS providers you can use to deploy and customise your ZK Chain: + +<!-- * [Caldera](https://www.caldera.xyz/) --> + +- [Zeeve](https://www.zeeve.io/appchains/zksync-hyperchains-zkrollups/) +- [Ankr](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/ankrpbc1684783099666.ankr_appchains?tab=Overview/) +- [AltLayer](https://altlayer.io/raas) +- [Magic](https://magic.link/docs/blockchains/other-chains/evm/zksync) +- [Luganodes](https://www.luganodes.com/product/zkraas/) diff --git a/content/10.zk-stack/20.running-a-zk-chain/99.dependencies.md b/content/10.zk-stack/20.running-a-zk-chain/99.dependencies.md new file mode 100644 index 00000000..7bdb5197 --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/99.dependencies.md @@ -0,0 +1,289 @@ +--- +title: Dependencies +--- + +## TL;DR + +If you run on 'clean' Debian on GCP: + +```bash +# Rust +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +# NVM +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash +# All necessary stuff +sudo apt-get install build-essential pkg-config cmake clang lldb lld libssl-dev postgresql docker-compose +# Docker +sudo usermod -aG docker YOUR_USER + +## You might need to re-connect (due to usermod change). + +# Node & yarn +nvm install node +npm install -g yarn +yarn set version 1.22.19 + +# SQL tools +cargo install sqlx-cli --version 0.5.13 +# Stop default postgres (as we'll use the docker one) +sudo systemctl stop postgresql +# Start docker. +sudo systemctl start docker +``` + +## Supported operating systems + +zkSync currently can be launched on any \*nix operating system (e.g. any linux distribution or MacOS). + +If you're using Windows, then make sure to use WSL 2, since WSL 1 is known to cause troubles. + +Additionally, if you are going to use WSL 2, make sure that your project is located in the _linux filesystem_, since +accessing NTFS partitions from inside of WSL is very slow.
+ +If you're using MacOS with an ARM processor (e.g. M1/M2), make sure that you are working in the _native_ environment +(e.g. your terminal and IDE don't run in Rosetta, and your toolchain is native). Trying to work with zkSync code via +Rosetta may cause problems that are hard to spot and debug, so make sure to check everything before you start. + +If you are a NixOS user or would like to have a reproducible environment, skip to the section about `nix`. + +## `Docker` + +Install `docker`. It is recommended to follow the instructions from the +[official site](https://docs.docker.com/install/). + +Note: currently official site proposes using Docker Desktop for Linux, which is a GUI tool with plenty of quirks. If you +want to only have CLI tool, you need the `docker-ce` package and you can follow +[this guide](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04) for Ubuntu. + +Installing `docker` via `snap` or from the default repository can cause troubles. + +You need to install both `docker` and `docker-compose`. + +**Note:** `docker-compose` is installed automatically with `Docker Desktop`. + +**Note:** On linux you may encounter the following error when you’ll try to work with `zksync`: + +```sh +ERROR: Couldn't connect to Docker daemon - you might need to run `docker-machine start default`. +``` + +If so, you **do not need** to install `docker-machine`. Most probably, it means that your user is not added to +the`docker` group. You can check it as follows: + +```bash +docker-compose up # Should raise the same error. +sudo docker-compose up # Should start doing things. +``` + +If the first command fails, but the second succeeds, then you need to add your user to the `docker` group: + +```bash +sudo usermod -a -G docker your_user_name +``` + +After that, you should logout and login again (user groups are refreshed after the login). The problem should be solved +at this step. 
+ +If logging out does not help, restarting the computer should. + +## `Node` & `Yarn` + +1. Install `Node` (requires version `v18.18.0`). Since our team attempts to always use the latest LTS version of + `Node.js`, we suggest you install [nvm](https://github.com/nvm-sh/nvm). It will allow you to update `Node.js` + version easily in the future (by running `nvm use` in the root of the repository) +2. Install `yarn` (make sure to get version 1.22.19 - you can change the version by running `yarn set version 1.22.19`). + Instructions can be found on the [official site](https://classic.yarnpkg.com/en/docs/install/). + Check if `yarn` is installed by running `yarn -v`. If you face any problems when installing `yarn`, it might be the + case that your package manager installed the wrong package. Make sure to thoroughly follow the instructions above on + the official website. It contains a lot of troubleshooting guides in it. + +## `Axel` + +Install `axel` for downloading keys: + +On mac: + +```bash +brew install axel +``` + +On debian-based linux: + +```bash +sudo apt-get install axel +``` + +Check the version of `axel` with the following command: + +```sh +axel --version +``` + +Make sure the version is higher than `2.17.10`. + +## `clang` + +In order to compile RocksDB, you must have LLVM available. On debian-based linux it can be installed as follows: + +On linux: + +```bash +sudo apt-get install build-essential pkg-config cmake clang lldb lld +``` + +On mac: + +You need to have an up-to-date `Xcode`. You can install it directly from `App Store`. With Xcode command line tools, you +get the Clang compiler installed by default. Thus, having XCode you don't need to install `clang`. + +## `OpenSSL` + +Install OpenSSL: + +On mac: + +```bash +brew install openssl +``` + +On linux: + +```bash +sudo apt-get install libssl-dev +``` + +## `Rust` + +Install the latest `rust` version. + +Instructions can be found on the [official site](https://www.rust-lang.org/tools/install).
+ +Verify the `rust` installation: + +```bash +rustc --version +rustc 1.xx.y (xxxxxx 20xx-yy-zz) # Output may vary depending on actual version of rust +``` + +If you are using MacOS with ARM processor (e.g. M1/M2), make sure that you use an `aarch64` toolchain. For example, when +you run `rustup show`, you should see a similar input: + +```bash +rustup show +Default host: aarch64-apple-darwin +rustup home: /Users/user/.rustup + +installed toolchains +-------------------- + +... + +active toolchain +---------------- + +1.67.1-aarch64-apple-darwin (overridden by '/Users/user/workspace/zksync-era/rust-toolchain') +``` + +If you see `x86_64` mentioned in the output, probably you're running (or used to run) your IDE/terminal in Rosetta. If +that's the case, you should probably change the way you run terminal, and/or reinstall your IDE, and then reinstall the +Rust toolchain as well. + +## Postgres + +Install the latest postgres: + +On mac: + +```bash +brew install postgresql@14 +``` + +On linux: + +```bash +sudo apt-get install postgresql +``` + +### Cargo nextest + +[cargo-nextest](https://nexte.st/) is the next-generation test runner for Rust projects. `zk test rust` uses +`cargo nextest` by default. + +```bash +cargo install cargo-nextest +``` + +### SQLx CLI + +SQLx is a Rust library we use to interact with Postgres, and its CLI is used to manage DB migrations and support several +features of the library. + +```bash +cargo install sqlx-cli --version 0.5.13 +``` + +## Solidity compiler `solc` + +Install the latest solidity compiler. + +```bash +brew install solidity +``` + +Alternatively, download a [precompiled version](https://github.com/ethereum/solc-bin) and add it to your PATH. + +## Python + +Most environments will have this preinstalled but if not, install Python. + +## Easier method using `nix` + +Nix is a tool that can fetch _exactly_ the right dependencies specified via hashes. 
The current config is Linux-only but +it is likely that it can be adapted to Mac. + +Install `nix`. Enable the nix command and flakes. + +Install docker, rustup and use rust to install SQLx CLI like described above. If you are on NixOS, you also need to +enable nix-ld. + +Go to the zksync folder and run `nix develop --impure`. After it finishes, you are in a shell that has all the +dependencies. + +## Environment + +Edit the lines below and add them to your shell profile file (e.g. `~/.bash_profile`, `~/.zshrc`): + +```bash +# Add path here: +export ZKSYNC_HOME=/path/to/zksync + +export PATH=$ZKSYNC_HOME/bin:$PATH + +# If you're like me, uncomment: +# cd $ZKSYNC_HOME +``` + +### Tip: `mold` + +Optionally, you may want to optimize the build time with the modern linker, [`mold`](https://github.com/rui314/mold). + +This linker will speed up the build times, which can be pretty big for Rust binaries. + +Follow the instructions in the repo in order to install it and enable for Rust. + +## Tip: Speeding up building `RocksDB` + +By default, each time you compile `rocksdb` crate, it will compile required C++ sources from scratch. It can be avoided +by using precompiled versions of library, and it will significantly improve your build times. + +In order to do so, you can put compiled libraries to some persistent location, and add the following to your shell +configuration file (e.g. `.zshrc` or `.bashrc`): + +```sh +export ROCKSDB_LIB_DIR=<library location> +export SNAPPY_LIB_DIR=<library location> +``` + +Make sure that compiled libraries match the current version of RocksDB. One way to obtain them, is to compile the +project in the usual way once, and then take built libraries from +`target/{debug,release}/build/librocksdb-sys-{some random value}/out`. 
diff --git a/content/10.zk-stack/20.running-a-zk-chain/99.enabling-prover.md b/content/10.zk-stack/20.running-a-zk-chain/99.enabling-prover.md new file mode 100644 index 00000000..7b8d5a0a --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/99.enabling-prover.md @@ -0,0 +1,30 @@ +--- +title: Enabling Prover +description: +--- + +With the default configuration, your ZK Chain is not running a prover, +and has a `DummyExecutor` contract, which mainly “accepts” that a batch is executed without proof. +This enables you to test it with much lower hardware requirements. + +To enable the prover, run the `zk stack prover-setup` command. +It will guide through the necessary configuration. + +> :warning: Running a prover is not required for deploying a testnet. The requirements below are only necessary if you want to enable the prover. + +## Requirements for CPU Prover + +The docker compose file assumes you will be running all components in the same machine. The current minimum requirements for a low TPS scenario are: + +- 32 Core CPU +- 128 GB of RAM +- 700 of Disk Space (SSD preferred) + +## Requirements for GPU Prover + +The docker compose file assumes you will be running all components in the same machine. The current minimum requirements for a low TPS scenario are: + +- 16 GB VRAM NVIDIA GPU +- 16 Core CPU +- 64 GB of RAM +- 300 GB of Disk Space (SSD preferred) diff --git a/content/10.zk-stack/20.running-a-zk-chain/99.using-zk-chain.md b/content/10.zk-stack/20.running-a-zk-chain/99.using-zk-chain.md new file mode 100644 index 00000000..3d8cd211 --- /dev/null +++ b/content/10.zk-stack/20.running-a-zk-chain/99.using-zk-chain.md @@ -0,0 +1,77 @@ +--- +title: Using Your ZK Chain RPC +description: +--- + +Your server contains both HTTPS as well as WS services that are fully web3 compatible (and contain some extra ZK Stack functionalities). 
+ +By default your server is available at <http://localhost:3050> - but if you deployed the server into some cloud provider, +you will have a different URL to interact with. + +## Using zksync-cli + +When executing any command with zksync-cli, +you can specify RPC urls for both L1 and L2 if you choose “localnet” as your network. +An example deposit command would look like: + +```bash +npx zksync-cli bridge deposit --rpc=http://localhost:3050 --l1-rpc=http://localhost:8545 +``` + +## Using dApp Portal + +You can run the Portal module locally, and point it to your ZK Chain configuration. +It comes with scripts that help pulling the ZK Chain configuration from your zksync-era repo and adapting to portal needs. +Learn more here. An example command would look like: + +```bash +npm run hyperchain:migrate ../zksync-era +npm run dev:node:hyperchain +``` + +## Using Block Explorer + +Block explorer contains three components (Worker, API, and App), which you can run all together locally and connect to your ZK Chain. +For that, you need to set up all the necessary environment and configuration files with your ZK Chain settings. +You can use a script to build them. See setting up env variables. + +Once you have your zksync-era repo set up locally, you can run the following command to +build environment and configuration files for block explorer based on your **[zksync-era](https://github.com/matter-labs/zksync-era)** repo configuration: + +```bash +npm run hyperchain:configure +``` + +The script generates all the necessary configuration files for block-explorer, which you can edit if you need any changes. + +## Addendum + +- If you make changes to any contract, you can always deploy a new ZK Chain to easily test those changes. + +- If you configure your ZK Chain once, you don't need to do it again as the wizard allows you to use an existing config file. + +- For now, it is only possible to deploy a ZK Chain as an L2, but soon it will also work as L3s. 
+ +- When running the default matterlabs/geth, you have a set of rich wallets available to you. + ::drop-panel + ::panel{label="Rich Wallets"} + :display-partial{path="/_partials/_rich-wallets"} + :: + :: +- If you want to have a custom local base chain, you must ensure you have a database for your ZK Chain, as well as the local RPC for your L1. + +- To run a Postgres 14 database for your ZK Chain, execute the following: + +```bash +docker-compose -f docker-compose-zkstack-common.yml up -d postgres +``` + +In case you don't want to use the docker Postgres database above but another one you already have locally, +make sure its version is 14 and it is running and accepts connections at postgres://postgres@localhost/zksync_local. +You can test with: + +```bash +psql -h localhost -U postgres -d postgres -c 'SELECT 1;' +``` + +If you face an issue compiling rust code (example `<jemalloc>: Error allocating TSD`) try removing the `rust-toolchain` file from the repo. diff --git a/content/10.zk-stack/_dir.yml b/content/10.zk-stack/_dir.yml new file mode 100644 index 00000000..7ac56ee0 --- /dev/null +++ b/content/10.zk-stack/_dir.yml @@ -0,0 +1 @@ +title: ZK Stack diff --git a/content/20.zksync-node/00.index.md b/content/20.zksync-node/00.index.md new file mode 100644 index 00000000..8e68b61f --- /dev/null +++ b/content/20.zksync-node/00.index.md @@ -0,0 +1,162 @@ +--- +title: Introduction +description: +--- + +::callout{icon="i-heroicons-information-circle" color="blue"} +For local testing, we recommend setting up an in-memory node and forking mainnet. +:: + +This documentation explains the basics of the zkSync Era Node. + +## Disclaimers + +- The zkSync node software is provided "as-is" without any express or implied warranties. +- The zkSync node is in the beta phase, and should be used with caution. +- The zkSync node is a read-only replica of the main node. +- The zkSync node is not going to be the consensus node. 
+- Running a sequencer node is currently not possible and there is no option to vote on blocks as part of the consensus mechanism + or [fork-choice](https://eth2book.info/capella/part3/forkchoice/#whats-a-fork-choice) like on Ethereum. + +## What is the zkSync Node? + +The zkSync node is a read-replica of the main (centralized) node that can be run by anyone. It +functions by fetching data from the zkSync API and re-applying transactions locally, starting from the genesis block. +The zkSync node shares most of its codebase with the main node. Consequently, when it re-applies transactions, it does +so exactly as the main node did in the past. + +In Ethereum terms, the current state of the zkSync node represents an archive node, providing access to the entire history of the blockchain. + +## High-level Overview + +At a high level, the zkSync node can be seen as an application that has the following modules: + +- API server that provides the publicly available Web3 interface. +- Synchronization layer that interacts with the main node and retrieves transactions and blocks to re-execute. +- Sequencer component that actually executes and persists transactions received from the synchronization layer. +- Several checker modules that ensure the consistency of the zkSync node state. + +With the zkSync node, you are able to: + +- Locally recreate and verify the zkSync Era mainnet/testnet state. +- Interact with the recreated state in a trustless way (in a sense that the validity is locally verified, and you should + not rely on a third-party API zkSync Era provides). +- Use the Web3 API without having to query the main node. +- Send L2 transactions (that will be proxied to the main node). + +With the zkSync node, you _can not_: + +- Create L2 blocks or L1 batches on your own. +- Generate proofs. +- Submit data to L1. + +A more detailed overview of the zkSync node's components is provided in the components section. 
+ +## API Overview + +API exposed by the zkSync node strives to be Web3-compliant. +If some method is exposed but behaves differently compared to +Ethereum, it should be considered a bug. +Please [report](https://zksync.io/contact) such cases. + +### `eth_` Namespace + +Data getters in this namespace operate in the L2 space: require/return L2 block numbers, check balances in L2, etc. + +Available methods: + +| Method | Notes | +|-------------------------------------------|---------------------------------------------------------------------------| +| `eth_blockNumber` | | +| `eth_chainId` | | +| `eth_call` | | +| `eth_estimateGas` | | +| `eth_gasPrice` | | +| `eth_newFilter` | Maximum amount of installed filters is configurable | +| `eth_newBlockFilter` | Same as above | +| `eth_newPendingTransactionsFilter` | Same as above | +| `eth_uninstallFilter` | | +| `eth_getLogs` | Maximum amount of returned entities can be configured | +| `eth_getFilterLogs` | Same as above | +| `eth_getFilterChanges` | Same as above | +| `eth_getBalance` | | +| `eth_getBlockByNumber` | | +| `eth_getBlockByHash` | | +| `eth_getBlockTransactionCountByNumber` | | +| `eth_getBlockTransactionCountByHash` | | +| `eth_getCode` | | +| `eth_getStorageAt` | | +| `eth_getTransactionCount` | | +| `eth_getTransactionByHash` | | +| `eth_getTransactionByBlockHashAndIndex` | | +| `eth_getTransactionByBlockNumberAndIndex` | | +| `eth_getTransactionReceipt` | | +| `eth_protocolVersion` | | +| `eth_sendRawTransaction` | | +| `eth_syncing` | EN is considered synced if it's less than 11 blocks behind the main node. 
| +| `eth_coinbase` | Always returns a zero address | +| `eth_accounts` | Always returns an empty list | +| `eth_getCompilers` | Always returns an empty list | +| `eth_hashrate` | Always returns zero | +| `eth_getUncleCountByBlockHash` | Always returns zero | +| `eth_getUncleCountByBlockNumber` | Always returns zero | +| `eth_mining` | Always returns false | + +### PubSub + +Only available on the WebSocket servers. + +Available methods: + +| Method | Notes | +| ------------------ | ----------------------------------------------- | +| `eth_subscribe` | Maximum amount of subscriptions is configurable | +| `eth_subscription` | | + +### `net_` Namespace + +Available methods: + +| Method | Notes | +| ---------------- | -------------------- | +| `net_version` | | +| `net_peer_count` | Always returns 0 | +| `net_listening` | Always returns false | + +### `web3_` Namespace + +Available methods: + +| Method | Notes | +| -------------------- | ----- | +| `web3_clientVersion` | | + +### `debug` namespace + +The `debug` namespace gives access to several non-standard RPC methods, which will allow developers to inspect and debug +calls and transactions. + +This namespace is disabled by default and can be configured via setting `EN_API_NAMESPACES` as described in the example config. + +Available methods: + +| Method | Notes | +| -------------------------- | ----- | +| `debug_traceBlockByNumber` | | +| `debug_traceBlockByHash` | | +| `debug_traceCall` | | +| `debug_traceTransaction` | | + +### `zks` namespace + +This namespace contains rollup-specific extensions to the Web3 API. +Note that _only methods_ specified in the documentation are considered public. +There may be other methods exposed in this namespace, but undocumented +methods come without any kind of stability guarantees and can be changed or removed without notice. 
+ +Always refer to the documentation linked above and [API reference documentation](/build/api-reference) to see the list of stabilized methods in this namespace. + +### `en` namespace + +This namespace contains methods that zkSync nodes call on the main node while syncing. If this namespace is enabled +other zkSync nodes can sync from this node. diff --git a/content/20.zksync-node/05.quickstart.md b/content/20.zksync-node/05.quickstart.md new file mode 100644 index 00000000..da82feaf --- /dev/null +++ b/content/20.zksync-node/05.quickstart.md @@ -0,0 +1,87 @@ +--- +title: Quick Start Guide for zkSync Node +description: +--- + +## Prerequisites + +- **Installations Required:** + - [Docker](https://docs.docker.com/get-docker/) + - [Docker Compose](https://docs.docker.com/compose/install/) + +## Setup Instructions + +1. Clone the zkSync Era repository and navigate to the zkSync node guide: + + ```bash + git clone https://github.com/matter-labs/zksync-era.git + cd zksync-era/docs/guides/external-node + ``` + +## Running a zkSync Node Locally + +### Starting the Node + +- **For a Mainnet instance:** + + ```bash + cd docker-compose-examples + docker compose --file mainnet-external-node-docker-compose.yml up + ``` + +- **For a Testnet instance:** + + ```bash + cd docker-compose-examples + docker compose --file testnet-external-node-docker-compose.yml up + ``` + +### Resetting the Node State + +- **For a Mainnet instance:** + + ```bash + cd docker-compose-examples + docker compose --file mainnet-external-node-docker-compose.yml down --volumes + ``` + +- **For a Testnet instance:** + + ```bash + cd docker-compose-examples + docker compose --file testnet-external-node-docker-compose.yml down --volumes + ``` + +### Monitoring Node Status + +Access the local Grafana dashboard to see the node status after recovery: +[Local Grafana Dashboard](http://localhost:3000/d/0/external-node). 
+ +### API Access + +- **HTTP JSON-RPC API:** Port `3060` +- **WebSocket API:** Port `3061` + +### Important Notes + +- **Initial Recovery:** The node will recover from a snapshot on its first run, which may take up to 10 hours. During +this period, the API server will not serve any requests. +- **Historical Data:** For access to historical transaction data, consider recovery from DB dumps. Refer to the Advanced Setup section for more details. +- **DB Dump:** For nodes that operate from a DB dump, which allows starting a zkSync node with a full historical +transactions history, refer to the documentation on running from DB dumps at [03_running.md](https://github.com/matter-labs/zksync-era/blob/main/docs/guides/external-node/03_running.md). + +## System Requirements + +The following are minimal requirements: + +- **CPU:** A relatively modern CPU is recommended. +- **RAM:** 32 GB +- **Storage:** + - **Testnet Nodes:** 30 GB + - **Mainnet Nodes:** 300 GB, with the state growing about 1TB per month. +- **Network:** 100 Mbps connection (1 Gbps+ recommended) + +## Advanced Setup + +For additional configurations like monitoring, backups, recovery from DB dump or snapshot, and custom PostgreSQL +settings, please refer to the [ansible-en-role repository](https://github.com/matter-labs/ansible-en-role). diff --git a/content/20.zksync-node/10.component-breakdown.md b/content/20.zksync-node/10.component-breakdown.md new file mode 100644 index 00000000..0b0cef51 --- /dev/null +++ b/content/20.zksync-node/10.component-breakdown.md @@ -0,0 +1,88 @@ +--- +title: zkSync Node Components +description: +--- + +This section contains an overview of the zkSync node's main components. + +## API + +The zkSync node can serve both the HTTP and the WS Web3 API, as well as PubSub. 
+Whenever possible, it provides data based on the local state, with a few exceptions: + +- Submitting transactions: Since it is a read replica, + submitted transactions are proxied to the main node, + and the response is returned from the main node. +- Querying transactions: The zkSync node is not aware of the main node's mempool, + and it does not sync rejected transactions. + Therefore, if a local lookup for a transaction or its receipt fails, + the zkSync node will attempt the same query on the main node. + +Apart from these cases, the API does not depend on the main node. +Even if the main node is temporarily unavailable, the zkSync node can continue to serve the state it has locally. + +## Fetcher + +The Fetcher component is responsible for maintaining synchronization between the zkSync node and the main node. +Its primary task is to fetch new blocks in order to update the local chain state. +However, its responsibilities extend beyond that. +For instance, the Fetcher is also responsible for keeping track of L1 batch statuses. +This involves monitoring whether locally applied batches have been committed, proven, or executed on L1. + +It is worth noting that in addition to fetching the _state_, the zkSync node also retrieves the L1 gas price from the main node +for the purpose of estimating fees for L2 transactions (since this also happens based on the local state). +This information is necessary to ensure that gas estimations are performed in the exact same manner as the main node, +thereby reducing the chances of a transaction not being included in a block. + +## State Keeper / VM + +The State Keeper component serves as the "sequencer" part of the node. +It shares most of its functionality with the main node, with one key distinction. +The main node retrieves transactions from the mempool and has the authority to decide when a specific L2 block or L1 batch should be sealed. 
+On the other hand, the zkSync node retrieves transactions from the queue populated by the Fetcher and seals the corresponding blocks/batches +based on the data obtained from the Fetcher queue. + +The actual execution of batches takes place within the VM, which is identical in any zkSync node. + +## Reorg Detector + +In zkSync Era, it is theoretically possible for L1 batches to be reverted before the corresponding "execute" operation +is applied on L1, that is before the block is [final](/zk-stack/concepts/finality). +Such situations are highly uncommon and typically occur due to significant issues: +e.g. a bug in the sequencer implementation preventing L1 batch commitment. +Prior to batch finality, the zkSync operator can perform a rollback, +reverting one or more batches and restoring the blockchain state to a previous point. +Finalized batches cannot be reverted at all. + +However, even though such situations are rare, the zkSync node must handle them correctly. + +To address this, the zkSync node incorporates a Reorg Detector component. +This module keeps track of all L1 batches that have not yet been finalized. +It compares the locally obtained state root hashes with those provided by the main node's API. +If the root hashes for the latest available L1 batch do not match, +the Reorg Detector searches for the specific L1 batch responsible for the divergence. +Subsequently, it rolls back the local state and restarts the node. +Upon restart, the zkSync node resumes normal operation. + +## Consistency Checker + +The main node API serves as the primary source of information for the zkSync node. +However, relying solely on the API may not provide sufficient security since the API data could potentially be incorrect due to various reasons. +The primary source of truth for the rollup system is the L1 smart contract. 
+Therefore, to enhance the security of the EN, each L1 batch undergoes cross-checking against +the L1 smart contract by a component called the Consistency Checker. + +When the Consistency Checker detects that a particular batch has been sent to L1, +it recalculates a portion of the input known as the "block commitment" for the L1 transaction. +The block commitment contains crucial data such as the state root and batch number, +and is the same commitment that is used for generating a proof for the batch. +The Consistency Checker then compares the locally obtained commitment with the actual commitment sent to L1. +If the data does not match, it indicates a potential bug in either the main node +or zkSync node implementation or that the main node API has provided incorrect data. +In either case, the state of the zkSync node cannot be trusted, and the zkSync node enters a crash loop until the issue is resolved. + +## Health check server + +The zkSync node also exposes an additional server that returns HTTP 200 response when the zkSync node is operating normally, +and HTTP 503 response when some of the health checks don't pass (e.g. when the zkSync node is not fully initialized yet). +This server can be used, for example, to implement the readiness probe in an orchestration solution you use. diff --git a/content/20.zksync-node/20.configuration.md b/content/20.zksync-node/20.configuration.md new file mode 100644 index 00000000..b1fe6a9a --- /dev/null +++ b/content/20.zksync-node/20.configuration.md @@ -0,0 +1,74 @@ +--- +title: Configuration +description: +--- + +This document outlines various configuration options for the zkSync node. Currently, the zkSync node requires the definition of numerous +environment variables. To streamline this process, we provide prepared configs for the zkSync Era - for both +mainnet and testnet. You can use +these files as a starting point and modify only the necessary sections. 
+ +## Database + +The zkSync node uses two databases: PostgreSQL and RocksDB. + +PostgreSQL serves as the main source of truth in the zkSync node, so all the API requests fetch the state from there. The +PostgreSQL connection is configured by the `DATABASE_URL`. Additionally, the `DATABASE_POOL_SIZE` variable defines the +size of the connection pool. + +RocksDB is used in components where IO is a bottleneck, such as the State Keeper and the Merkle tree. If possible, it is +recommended to use an NVME SSD for RocksDB. RocksDB requires two variables to be set: `EN_STATE_CACHE_PATH` and +`EN_MERKLE_TREE_PATH`, which must point to different directories. + +## L1 Web3 client + +The zkSync node requires a connection to an Ethereum node. The corresponding env variable is `EN_ETH_CLIENT_URL`. Make sure to set +the URL corresponding to the correct L1 network (L1 mainnet for L2 mainnet and L1 sepolia for L2 testnet). + +Note: Currently, the zkSync node makes 2 requests to the L1 per L1 batch, so the Web3 client usage for a synced node should not +be high. However, during the synchronization phase the new batches would be persisted on the zkSync node quickly, so make sure +that the L1 client won't exceed any limits (e.g. in case you use Infura). + +## Exposed ports + +The dockerized version of the server exposes the following ports: + +- HTTP JSON-RPC: `3060` +- WebSocket JSON-RPC: `3061` +- Prometheus listener: `3322` +- Healthcheck server: `3081` + +While the configuration variables for them exist, you are not expected to change them unless you want to use the zkSync node +outside of provided docker environment (not supported at the time of writing). +::callout{icon="i-heroicons-information-circle" color="blue"} +**NOTE**: if the Prometheus port is configured, it must be [scraped](https://prometheus.io/docs/introduction/overview/) +periodically to avoid a memory leak due to a +[bug in an external metrics library](https://github.com/metrics-rs/metrics/issues/245). 
+If you are not intending to use the metrics, leave this port not configured, and the metrics won't be collected. +:: + +## API limits + +There are variables that allow you to fine-tune the limits of the RPC servers, such as limits on the number of returned +entries or the limit for the accepted transaction size. Provided files contain sane defaults that are recommended for +use, but these can be edited, e.g. to make the zkSync node more/less restrictive. + +## JSON-RPC API namespaces + +There are 7 total supported API namespaces: `eth`, `net`, `web3`, `debug` - standard ones; `zks` - rollup-specific one; +`pubsub` - a.k.a. `eth_subscribe`; `en` - used by zkSync nodes while syncing. You can configure what namespaces you +want to enable using `EN_API_NAMESPACES` and specifying namespace names in a comma-separated list. By default, all but +the `debug` namespace are enabled. + +## Logging and observability + +`MISC_LOG_FORMAT` defines the format in which logs are shown: `plain` corresponds to the human-readable format, while +the other option is `json` (recommended for deployments). + +`RUST_LOG` variable allows you to set up the logs granularity (e.g. make the zkSync node emit fewer logs). You can read about the +format [here](https://docs.rs/env_logger/0.10.0/env_logger/#enabling-logging). + +`MISC_SENTRY_URL` and `MISC_OTLP_URL` variables can be configured to set up Sentry and OpenTelemetry exporters. + +If Sentry is configured, you also have to set `EN_SENTRY_ENVIRONMENT` variable to configure the environment in events +reported to sentry. diff --git a/content/20.zksync-node/30.running-node.md b/content/20.zksync-node/30.running-node.md new file mode 100644 index 00000000..7e941f85 --- /dev/null +++ b/content/20.zksync-node/30.running-node.md @@ -0,0 +1,80 @@ +--- +title: Running a Node +description: +--- + +This section assumes that you have prepared a configuration file as described on the previous page. 
+ +## System Requirements for nodes started from DB dumps + +This configuration is approximate and should be considered as **minimal** requirements. + +- 32-core CPU +- 64GB RAM +- SSD storage (NVME recommended): + - Sepolia Testnet - 10GB zkSync node + 50GB PostgreSQL (at the time of writing, will grow over time, so should be + constantly monitored) + - Mainnet - 3TB zkSync node + 8TB PostgreSQL (at the time of writing, will grow over time, so should be constantly + monitored) +- 100 Mbps connection (1 Gbps+ recommended) + +### A note about PostgreSQL storage + +By far, the heaviest table to maintain is the `call_traces` table. This table is only required for the `debug` +namespace. If you want to clear some space and aren't using the `debug` namespace, you can + +- clear it with a simple query `DELETE FROM call_traces;` +- leave the `debug` namespace disabled via the `EN_API_NAMESPACES` env var as described in the + example config. + +## Infrastructure + +You need to set up a PostgreSQL server with SSD storage: + +- Testnet - ~1TB (at the time of writing) and will grow over time, so should be constantly monitored +- Mainnet - ~2TB (at the time of writing) and will grow over time, so should be constantly monitored + +Setting up Postgres is out of the scope of these docs, but the popular choice is to run it in Docker. There are many of +guides on that, [here's one example](https://www.docker.com/blog/how-to-use-the-postgres-docker-official-image/). + +Note however that if you run Postgres as a stand-alone Docker image (e.g. not in Docker-compose with a network shared +between zkSync node and Postgres), zkSync node won't be able to access Postgres via `localhost` or `127.0.0.1` URLs. To make it work, +you'll have to either run it with a `--network host` (on Linux) or use `host.docker.internal` instead of `localhost` in +the zkSync node configuration (official docs). + +Besides running Postgres, you are expected to have a DB dump from a corresponding env. 
You can restore it using +`pg_restore -O -C <DUMP_PATH> --dbname=<DB_URL>`. + +Steps how to connect from a Docker container to a service on the host can be found [here](https://docs.docker.com/desktop/networking/#i-want-to-connect-from-a-container-to-a-service-on-the-host) + +## Running + +Assuming you have the zkSync node Docker image, an env file with the prepared configuration, and you have restored your DB with +the pg dump, that is all you need. + +Sample running command: + +```sh +docker run --env-file <path_to_env_file> --mount type=bind,source=<local_rocksdb_data_path>,target=<configured_rocksdb_data_path> <image> +``` + +Helm charts and other infrastructure configuration options, if required, would be available later. + +## First start + +When you start the node for the first time, the state in PostgreSQL corresponds to the dump you have used, but the state +in RocksDB (mainly the Merkle tree) is absent. Before the node can make any progress, it has to rebuild the state in +RocksDB and verify consistency. The exact time required for that depends on the hardware configuration, but it is +reasonable to expect the state rebuild on the mainnet to take more than 20 hours. + +## Redeploying the zkSync node with a new PG dump + +If you've been running the zkSync node for some time and are going to redeploy it using a new PG dump, you should + +- Stop the zkSync node +- Remove SK cache (corresponding to `EN_STATE_CACHE_PATH`) +- Remove your current DB +- Restore with the new dump +- Start the zkSync node + +Monitoring the node behavior and analyzing the state it's in is covered in the observability section. 
diff --git a/content/20.zksync-node/40.api-overview.md b/content/20.zksync-node/40.api-overview.md new file mode 100644 index 00000000..b9a4d695 --- /dev/null +++ b/content/20.zksync-node/40.api-overview.md @@ -0,0 +1,116 @@ +--- +title: API Overview +description: +--- + +::callout{icon="i-heroicons-information-circle" color="blue"} +The API exposed by the zkSync node is designed to be Web3-compliant. +Any deviation from the Ethereum behavior is likely unintended, and we encourage users to report such discrepancies. +:: + +### `eth_` Namespace + +Data getters in this namespace operate in the L2 domain. They deal with L2 block numbers, check balances in L2, and more. + +| Method | Notes | +| ----------------------------------------- | ------------------------------------------------------------------------- | +| `eth_blockNumber` | | +| `eth_chainId` | | +| `eth_call` | | +| `eth_estimateGas` | | +| `eth_gasPrice` | | +| `eth_newFilter` | Maximum amount of installed filters is configurable | +| `eth_newBlockFilter` | Same as above | +| `eth_newPendingTransactionsFilter` | Same as above | +| `eth_uninstallFilter` | | +| `eth_getLogs` | Maximum amount of returned entities can be configured | +| `eth_getFilterLogs` | Same as above | +| `eth_getFilterChanges` | Same as above | +| `eth_getBalance` | | +| `eth_getBlockByNumber` | | +| `eth_getBlockByHash` | | +| `eth_getBlockTransactionCountByNumber` | | +| `eth_getBlockTransactionCountByHash` | | +| `eth_getCode` | | +| `eth_getStorageAt` | | +| `eth_getTransactionCount` | | +| `eth_getTransactionByHash` | | +| `eth_getTransactionByBlockHashAndIndex` | | +| `eth_getTransactionByBlockNumberAndIndex` | | +| `eth_getTransactionReceipt` | | +| `eth_protocolVersion` | | +| `eth_sendRawTransaction` | | +| `eth_syncing` | zkSync node is considered synced if it's less than 11 blocks behind the main node. 
| +| `eth_coinbase` | Always returns a zero address | +| `eth_accounts` | Always returns an empty list | +| `eth_getCompilers` | Always returns an empty list | +| `eth_hashrate` | Always returns zero | +| `eth_getUncleCountByBlockHash` | Always returns zero | +| `eth_getUncleCountByBlockNumber` | Always returns zero | +| `eth_mining` | Always returns false | + +### **PubSub** + +This is exclusively available on the WebSocket servers. + +| Method | Notes | +| ------------------ | ----------------------------------------------- | +| `eth_subscribe` | Maximum amount of subscriptions is configurable | +| `eth_subscription` | | + +### `net_` Namespace + +| Method | Notes | +| ---------------- | -------------------- | +| `net_version` | | +| `net_peer_count` | Always returns 0 | +| `net_listening` | Always returns false | + +### `web3_` Namespace + +| Method | Notes | +| -------------------- | ----- | +| `web3_clientVersion` | | + +### `debug_` Namespace + +This namespace provides a set of non-standard RPC methods for developers to inspect and debug calls and transactions. +By default, this namespace is disabled but can be activated using the `EN_API_NAMESPACES` setting. +Please refer to the configuration section for more details. + +| Method | Notes | +| -------------------------- | ----- | +| `debug_traceBlockByNumber` | | +| `debug_traceBlockByHash` | | +| `debug_traceCall` | | +| `debug_traceTransaction` | | + +### `zks` Namespace + +This namespace holds rollup-specific extensions to the Web3 API. +Only the methods documented are deemed public. Other methods in this namespace, though exposed, are not stable and may change without notice. 
+ +| Method | Notes | +| ----------------------------- | ----- | +| `zks_estimateFee` | | +| `zks_estimateGasL1ToL2` | | +| `zks_getAllAccountBalances` | | +| `zks_getBlockDetails` | | +| `zks_getBridgeContracts` | | +| `zks_getBytecodeByHash` | | +| `zks_getConfirmedTokens` | | +| `zks_getL1BatchBlockRange` | | +| `zks_getL1BatchDetails` | | +| `zks_getL2ToL1LogProof` | | +| `zks_getL2ToL1MsgProof` | | +| `zks_getMainContract` | | +| `zks_getRawBlockTransactions` | | +| `zks_getTestnetPaymaster` | | +| `zks_getTokenPrice` | | +| `zks_getTransactionDetails` | | +| `zks_L1BatchNumber` | | +| `zks_L1ChainId` | | + +### `en` Namespace + +This namespace includes methods that zkSync node call on the main node during syncing. If this namespace is active, other ENs can sync using this node. diff --git a/content/20.zksync-node/50.observability.md b/content/20.zksync-node/50.observability.md new file mode 100644 index 00000000..61a289ca --- /dev/null +++ b/content/20.zksync-node/50.observability.md @@ -0,0 +1,57 @@ +--- +title: Observability +description: +--- + +The zkSync node provides several options for setting up observability. Configuring logs and sentry is described in the +configuration section, so this section focuses on the exposed metrics. + +This section is written with the assumption that you're familiar with +[Prometheus](https://prometheus.io/docs/introduction/overview/) and [Grafana](https://grafana.com/docs/). + +## Buckets + +By default, latency histograms are distributed in the following buckets (in seconds): + +```bash +[0.001, 0.005, 0.025, 0.1, 0.25, 1.0, 5.0, 30.0, 120.0] +``` + +## Metrics + +The zkSync node exposes a lot of metrics, a significant amount of which aren't interesting outside the development flow. This +section's purpose is to highlight metrics that may be worth observing in the external setup. + +If you are not planning to scrape Prometheus metrics, please unset `EN_PROMETHEUS_PORT` environment variable to prevent +memory leaking. 
+ +| Metric name | Type | Labels | Description | +| ---------------------------------------------- | --------- | ------------------------------------- | ------------------------------------------------------------------ | +| `external_node_synced` | Gauge | - | 1 if synced, 0 otherwise. Matches `eth_call` behavior | +| `external_node_sync_lag` | Gauge | - | How many blocks behind the main node the zkSync node is | +| `external_node_fetcher_requests` | Histogram | `stage`, `actor` | Duration of requests performed by the different fetcher components | +| `external_node_fetcher_cache_requests` | Histogram | - | Duration of requests performed by the fetcher cache layer | +| `external_node_fetcher_miniblock` | Gauge | `status` | The number of the last L2 block update fetched from the main node | +| `external_node_fetcher_l1_batch` | Gauge | `status` | The number of the last batch update fetched from the main node | +| `external_node_action_queue_action_queue_size` | Gauge | - | Amount of fetched items waiting to be processed | +| `server_miniblock_number` | Gauge | `stage`=`sealed` | Last locally applied L2 block number | +| `server_block_number` | Gauge | `stage`=`sealed` | Last locally applied L1 batch number | +| `server_block_number` | Gauge | `stage`=`tree_lightweight_mode` | Last L1 batch number processed by the tree | +| `server_processed_txs` | Counter | `stage`=`mempool_added, state_keeper` | Can be used to show incoming and processing TPS values | +| `api_web3_call` | Histogram | `method` | Duration of Web3 API calls | +| `sql_connection_acquire` | Histogram | - | Time to get an SQL connection from the connection pool | + +## Interpretation + +After applying a dump, the zkSync node has to rebuild the Merkle tree to verify the correctness of the state in PostgreSQL. 
+During this stage, `server_block_number { stage='tree_lightweight_mode' }` is increasing from 0 to +`server_block_number { stage='sealed' }`, while the latter does not increase (the zkSync node needs the tree to be up-to-date to +progress). + +After that, the zkSync node has to sync with the main node. `server_block_number { stage='sealed' }` is increasing, and +`external_node_sync_lag` is decreasing. + +Once the node is synchronized, this is indicated by the `external_node_synced` metric. + +Metrics can be used to detect anomalies in configuration, which is described in more detail in the +next section. diff --git a/content/20.zksync-node/60.troubleshooting.md b/content/20.zksync-node/60.troubleshooting.md new file mode 100644 index 00000000..c83fd501 --- /dev/null +++ b/content/20.zksync-node/60.troubleshooting.md @@ -0,0 +1,62 @@ +--- +title: Troubleshooting +description: +--- + +The zkSync node tries to follow the fail-fast principle: if an anomaly is discovered, instead of attempting state recovery, in +most cases it will restart. Most of the time it will manifest as crashes, and if it happens once, it shouldn't be +treated as a problem. + +However, if the node enters a crash loop or otherwise behaves unexpectedly, it may indicate either a bug in the +implementation or a problem with configuration. This section tries to cover common problems. + +## Panics + +A panic is the Rust programming language's notion of an irrecoverable error; normally, if a panic happens, the application +will immediately crash. + +- Panic matching `called Result::unwrap() on an Err value: Database(PgDatabaseError)`: a problem communicating with + PostgreSQL, most likely some of the connections have died. +- Panic matching `failed to init rocksdb: Error { message: "IO error: No space left on device"}`: more space on SSD is + required. +- Anything that mentions "Poison Error": a "secondary" panic that may occur if one of the components panicked first. 
If + you see this panic, look for a panic that happened shortly before it to find the real cause. + +Other kinds of panic aren't normally expected. While in most cases, the state will be recovered after a restart, please +[report](https://zksync.io/contact) such cases to Matter Labs regardless. + +## Genesis Issues + +The zkSync node is supposed to start with an applied DB dump. If you see any genesis-related errors, it probably means the zkSync node was +started without an applied dump. + +Feel free to [contact us](https://zksync.io/contact) in case of related questions. + +## Logs + +_Note: logs with the `error` level are reported to Sentry if it's configured. If you notice unneeded alerts there that +you don't consider actionable, you may disable logs for a component by tweaking the configuration._ + +| Level | Log substring | Interpretation | +| ----- | ----------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------- | +| ERROR | "One of the tokio actors unexpectedly finished" | One of the components crashed, and the node is restarting. | +| WARN | "Stop signal received, <component\> is shutting down" | Satellite log of the message above | +| ERROR | "A lot of requests to the remote API failed in a row" | The remote API used to update token lists is probably down. Logs should disappear once API is available. | +| WARN | "Server returned an error status code: 429" | The main API rate limits are too strict. [Contact](https://zksync.io/contact) Matter Labs to discuss the situation. | +| WARN | "Following transport error occurred" | There was a problem with fetching data from the main node. | +| WARN | "Unable to get the gas price" | There was a problem with fetching data from the main node. | +| WARN | "Consistency checker error" | There are problems querying L1, check the Web3 URL you specified in the config. 
| +| WARN | "Reorg detected" | Reorg was detected on the main node, the zkSync node will rollback and restart | + +Same as with panics, normally it's only a problem if a WARN+ level log appears many times in a row. + +## Metrics anomalies + +The following common anomalies can be discovered by observing metrics _after the tree is rebuilt to match the DB +snapshot_: + +- `external_node_sync_lag` doesn't decrease and `external_node_action_queue_action_queue_size` is near 0. Cause: The + fetcher can't fetch new blocks quickly enough. Most likely, the network connection is too slow. +- `external_node_sync_lag` doesn't decrease and `external_node_action_queue_action_queue_size` is at some high level. + Cause: The State Keeper doesn't process fetched data quickly enough. Most likely, a more powerful CPU is needed. +- `sql_connection_acquire` skyrockets. Probably, there are not enough connections in the pool to match the demand. diff --git a/content/20.zksync-node/_dir.yml b/content/20.zksync-node/_dir.yml new file mode 100644 index 00000000..52949170 --- /dev/null +++ b/content/20.zksync-node/_dir.yml @@ -0,0 +1 @@ +title: zkSync Node diff --git a/content/30.ecosystem/00.index.md b/content/30.ecosystem/00.index.md new file mode 100644 index 00000000..d925e19d --- /dev/null +++ b/content/30.ecosystem/00.index.md @@ -0,0 +1,75 @@ +--- +title: Explore the Ecosystem +description: Explore the zkSync Era ecosystem, a comprehensive suite of services and tools from wallets to marketplaces that empower and enhance your experience. +--- + +Explore Wallets, Data services, Node providers, Marketplaces, Oracles, and much, much more +in the zkSync Era ecosystem. +To see a wider list of the ecosystem, check out [zkSync Era on DappRadar](https://zksync.dappradar.com/ecosystem). + +::card-group + ::card + --- + title: Wallets + icon: i-heroicons-wallet-solid + to: /ecosystem/wallets + --- + Access and manage your assets seamlessly with secure and interoperable wallets. 
+ :: + ::card + --- + title: Data Indexers + icon: i-heroicons-circle-stack-solid + to: /ecosystem/data-indexers + --- + Harness the power of enhanced data retrieval to facilitate rich query capabilities. + :: + ::card + --- + title: Node Providers + icon: i-heroicons-server-solid + to: /ecosystem/node-providers + --- + Connect to zkSync Era reliably with robust and scalable node services. + :: + ::card + --- + title: Marketplaces + icon: i-heroicons-shopping-bag-solid + to: /ecosystem/nft-marketplaces + --- + Explore digital goods with dynamic NFT marketplaces, fostering unique collectible exchanges. + :: + ::card + --- + title: Oracles + icon: i-heroicons-adjustments-horizontal-solid + to: /ecosystem/oracles + --- + Integrate real-world data securely to enhance application functionalities. + :: + ::card + --- + title: Bridges + icon: i-heroicons-cursor-arrow-ripple-16-solid + to: /ecosystem/bridges + --- + Experience seamless asset transfers between different blockchain ecosystems + :: + ::card + --- + title: Faucets + icon: i-heroicons-currency-dollar-solid + to: /ecosystem/network-faucets + --- + Get free test tokens for development purposes in the zkSync test network. + :: + ::card + --- + title: Monitoring + icon: i-heroicons-chart-bar-solid + to: /ecosystem/monitoring + --- + Keep track of network operations with our comprehensive monitoring tools. + :: +:: diff --git a/content/30.ecosystem/10.bridges.md b/content/30.ecosystem/10.bridges.md new file mode 100644 index 00000000..be7ea6d6 --- /dev/null +++ b/content/30.ecosystem/10.bridges.md @@ -0,0 +1,50 @@ +--- +title: Bridges +description: +--- + +Bridges are pivotal in enhancing interoperability between different networks or layers, facilitating seamless asset and +data transfer. In this section, we delve into various bridge solutions integrated within the zkSync ecosystem, providing +developers and users with diverse options for cross-chain or cross-layer interactions. 
+ +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of bridging options within the zkSync ecosystem, feel free to explore the +[bridges](https://zksync.dappradar.com/ecosystem?category=defi_bridge&page=1) category on Dappradar. +:: + +## zkSync Portal Bridge + +The [Portal Bridge](https://bridge.zksync.io/) on zkSync provides a gateway for assets between Ethereum and the zkSync +network, ensuring secure and efficient transfers. + +## Across + +[Across Bridge](https://app.across.to/bridge) is the most efficient, secure, and fastest way to transfer assets across blockchains. +It features gas-optimized contracts, aggregated verification, and an intents-based design for maximum capital efficiency. +Leveraging a competitive network of relayers, Across ensures bridge orders are filled within seconds. + +## MES Protocol + +[MES Protocol](https://mesprotocol.com/) is a cross-chain bridge that provides instant, low-cost and easy-to-use asset +transfer between EVM and non-EVM compatible chains. + +## Omnibtc Finance + +[Omnibtc Finance](https://www.omnibtc.finance/) operates as a decentralized platform offering cross-chain swap, lending, +and borrowing services. Its primary objective is to integrate and harmonize on-chain liquidity across various networks. + +## Orbiter Finance + +[Orbiter Finance](https://www.orbiter.finance/?source=Ethereum&dest=zkSync%20Era&token=ETH) is a cross-chain rollup +protocol designed to enable safe, economical, and swift transfer of messages or assets across different networks. + +## Owlto Finance + +[Owlto Finance](https://owlto.finance/) is focused on creating a decentralized cross-rollup bridge, emphasizing layer 2 +solutions to promote scalability and interoperability. + +## Symbiosis + +[Symbiosis](https://app.symbiosis.finance/bridge) is a cross-chain AMM DEX that pools together liquidity from different networks: +L1s and L2s, EVM and non-EVM. 
+With Symbiosis, you can easily swap any token and move your assets across different networks. diff --git a/content/30.ecosystem/100.wallets.md b/content/30.ecosystem/100.wallets.md new file mode 100644 index 00000000..bd7689d5 --- /dev/null +++ b/content/30.ecosystem/100.wallets.md @@ -0,0 +1,91 @@ +--- +title: Wallets +description: +--- + +The following wallets are known for their robustness, ease of use, and compatibility with zkSync +Era. These wallets offer various features including, but not limited to, DeFi, NFT management, +and multiple chain support. + +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of wallets within the zkSync ecosystem, feel free to explore the +[wallets](https://zksync.dappradar.com/ecosystem?category=non_dapps_wallets&page=1) +category on Dappradar. +:: + +## BlockWallet + +[BlockWallet](https://blockwallet.io/networks/zksync-wallet) is designed for zkSync users seeking +a decentralized wallet in a user-friendly package. + +**Availability**: Chrome extension + +## Clave + +[Clave](https://getclave.io/) is a zkSync native non-custodial smart wallet powered by account +abstraction and the hardware-level security elements to simplify the onchain experience for the +next billions. + +**Availability**: App Store (IOS and MacOS), Google Play Store + +## Echoo + +[Echoo](https://www.echooo.xyz/) combines MPC and AI technologies to offer a next-generation AA +smart contract wallet with low-cost Layer 2 capabilities. + +**Availability**: App store, Google Play + +## Enkrypt + +[Enkrypt](https://www.enkrypt.com/networks/zksync-wallet/) provides native support for zkSync and enables seamless building and deployment on zkSync. + +**Availability**: Browser extension (all browsers) + +## Holdstation + +[Holdstation](https://holdstation.com/) emphasizes self-custodial and private +transactions, driven by Account Abstraction and Layer-2 solutions. 
+ +**Availability**: App store, Google Play + +## MetaMask + +[MetaMask](https://metamask.io/) is a popular crypto wallet offering a wide range of +features and is trusted by over 30 million users worldwide. + +**Availability**: Browser extension + +## OKX Wallet + +[OKX Wallet](https://www.okx.com/web3) offers a diverse range of Web3 functionalities +at your fingertips. + +**Availability**: Mobile, Desktop, Chrome Extension + +## Pier Wallet + +[Pier Wallet](https://www.pierwallet.com/) enables Web3 integration through its leading smart contract wallet as a service. + +**Availability**: Web app + +## Rabby Wallet + +[Rabby Wallet](https://rabby.io/) is an open-source browser plugin that facilitates a secure multi-chain experience in the DeFi ecosystem. + +**Availability**: Chrome extension, Desktop (macOS, Windows) + +## YaspFi + +[YaspFi](https://yasp.fi/) is focused on sustainable DeFi yield and offers an all-in-one solution. + +**Availability**: Web App, Chrome browser extension + +## Zerion + +[Zerion](https://zerion.io/) enables you to manage your DeFi and NFT portfolios, trade across 10+ networks, and connect to decentralized applications. + +**Availability**: Web, Android, iOS, macOS + +Choose a wallet based on your specific requirements for transaction types, chain +support, and additional functionalities. Always remember to conduct your own due +diligence before using any third-party service. diff --git a/content/30.ecosystem/20.cross-chain.md b/content/30.ecosystem/20.cross-chain.md new file mode 100644 index 00000000..3ed85e3b --- /dev/null +++ b/content/30.ecosystem/20.cross-chain.md @@ -0,0 +1,11 @@ +--- +title: Cross Chain +description: +--- + +## LayerZero + +[LayerZero](https://layerzero.network) is an interoperability protocol that connects blockchains (50+ and counting), allowing developers to build seamless +omnichain applications, tokens, and experiences. 
The protocol relies on immutable on-chain +endpoints, a configurable Security Stack, and a permissionless set of Executors +to transfer censorship-resistant messages between chains. diff --git a/content/30.ecosystem/30.data-indexers.md b/content/30.ecosystem/30.data-indexers.md new file mode 100644 index 00000000..9630fab7 --- /dev/null +++ b/content/30.ecosystem/30.data-indexers.md @@ -0,0 +1,75 @@ +--- +title: Data Indexers +description: +--- + +## Overview + +Welcome to the Analytics page, a comprehensive hub dedicated to interacting with data services and +analytic tooling on zkSync Era. Each guide includes hands-on examples, ensuring that both +newcomers and experienced developers can seamlessly harness the power of the analytical tooling within the zkSync Era. + +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of options within the zkSync ecosystem, feel free to explore the +[infrastructure and developer tools](https://zksync.dappradar.com/ecosystem?page=1&category=non_dapps_infrastructure%2Cnon_dapps_developer_tools) +categories on Dappradar. +:: + +## Covalent + +[Covalent](https://www.covalenthq.com/docs/networks/zksync-era/) provides the industry-leading +Unified API bringing visibility to billions of Web3 data points. Developers use Covalent to +build exciting multi-chain applications like crypto wallets, NFT galleries, and investor dashboard tools utilizing data. + +## DipDup + +[DipDup](https://dipdup.io/) is a Python framework for building smart contract indexers. It +helps developers focus on business logic instead of writing boilerplate to store and serve +data. DipDup-based indexers are selective, which means only required data is requested. This +approach makes it possible to achieve faster indexing times and a decreased load on underlying APIs. 
+ +## Dune + +[Dune](https://dune.com/home) is a powerful data indexing service for blockchain, +providing a comprehensive platform to query, visualize, share, and export data across 30+ blockchains. +It enables users to collaboratively build and analyze blockchain data with the support of Web3's largest data community, +making it an essential tool for anyone involved in the crypto space. + +## Flair + +[Flair](https://docs.flair.dev/) offers reusable **indexing primitives** (such as +fault-tolerant RPC ingestors, custom processors, re-org aware database integrations) +to make it easy to receive, transform, store and access your on-chain data. + +## Goldsky + +[Goldsky](https://goldsky.com/) is a data indexer for web3 builders, +offering high-performance subgraph hosting and realtime data replication pipelines. +Goldsky offers two core self-serve products, Subgraphs and Mirror, +that can be used independently or in conjunction to power your data stack. + +## Graph Network + +[The Graph](https://thegraph.com/) is a decentralized protocol for indexing and querying +blockchain data. The Graph makes it possible to query data that is difficult to query directly. +Use The Graph network today to index protocol data on zkSync! + +## Space & Time + +[Space and Time](https://www.spaceandtime.io/) is the verifiable compute layer that scales +zero-knowledge proofs on a decentralized data warehouse to deliver trustless data processing to +smart contracts, LLMs, and enterprises. Space and Time joins indexed blockchain data from major chains with offchain datasets. + +## SubQuery Network + +[SubQuery](https://subquery.network/) is a fast, flexible, and reliable open-source data +indexer that provides you with custom APIs for your web3 project across all of our supported +chains. + +## Subsquid + +[Subsquid](https://subsquid.io/) is a decentralized indexing toolkit optimized for batch +extraction of large volumes of data. 
It currently serves historical on-chain data including +event logs, transaction receipts, traces and per-transaction state diffs. Subsquid offers a +powerful toolkit for creating custom data extraction and processing pipelines, achieving +indexing speeds of up to 150k blocks per second. diff --git a/content/30.ecosystem/40.ide.md b/content/30.ecosystem/40.ide.md new file mode 100644 index 00000000..24392f98 --- /dev/null +++ b/content/30.ecosystem/40.ide.md @@ -0,0 +1,27 @@ +--- +title: IDE +description: +--- + +## Overview + +Welcome to the **IDE page**, a comprehensive hub dedicated to interacting with ready-to-use +interactive developer environments using zkSync Era. Each guide has been curated to offer +hands-on examples, ensuring that both newcomers and experienced developers can seamlessly get +started with developing on zkSync Era. + +## Atlas IDE + +[**Atlas**](https://www.atlaszk.com/) provides a robust and user-friendly environment to write, +test, and deploy your smart contracts in a matter of minutes. Discover the potential and get +started with Atlas today and deploy your first contract on zkSync Era by following this [video tutorial](https://www.youtube.com/watch?v=TL-QnxoPyUY)! + +## Remix IDE + +The [Remix](https://remix.ethereum.org/) plugin for zkSync Era is live, providing a smooth, +user-friendly interface for developers of all skill levels to engage with the zkSync ecosystem. +The plugin simplifies the writing and deployment of zkSync’s smart contracts, making it +accessible to newcomers and experienced users. + +Follow the [The zkSync Era Remix Plugin: A How-To Guide](https://medium.com/nethermind-eth/the-zksync-era-remix-plugin-a-how-to-guide-fc54e8d24bd3), +written by [Nethermind](https://x.com/NethermindEth), the team who developed the plugin. 
diff --git a/content/30.ecosystem/50.monitoring.md b/content/30.ecosystem/50.monitoring.md new file mode 100644 index 00000000..7db9684f --- /dev/null +++ b/content/30.ecosystem/50.monitoring.md @@ -0,0 +1,48 @@ +--- +title: Monitoring +description: +--- + +Monitoring is a crucial aspect of the development and maintenance phases for any blockchain +network. It provides insights into the performance, health, and other operational aspects of +the network and applications. In this section, we explore key tools that offer monitoring +solutions, aiding developers in keeping a close watch on their projects within the zkSync +ecosystem. These tools provide a platform for analytics, real-time monitoring, and data +aggregation which are essential for making informed decisions. + +## Elliptic + +[Elliptic](https://www.elliptic.co/), a leader in cryptoasset risk management, has integrated support for zkSync, +a Layer 2 scaling solution for Ethereum, to bolster its blockchain analytics capabilities. +This partnership enables organizations building on zkSync, such as hybrid exchange GRVT, +to use Elliptic's real-time wallet and transaction screening tools for efficient compliance and risk management. +The integration leverages zkSync's scalable, low-cost solution to Ethereum's high gas fees and slow transactions, +enhancing security and transparency. +This collaboration signifies a significant step in providing comprehensive blockchain support +and advancing the zkSync ecosystem with robust analytics tools. + +## Coinfirm / Lukka + +[Lukka and Coinfirm](https://lukka.tech/) have joined forces with zkSync to provide advanced wallet screening solutions for the zkSync network. +As the preferred provider for crypto-asset funds and fund service providers, Lukka is trusted by industry leaders such as State Street, +Polychain, and eToro for its precise reporting and Enterprise Data Management solutions. 
+Coinfirm's blockchain analytics platform offers robust transaction, wallet, and cluster monitoring for crypto assets. +This partnership will enhance zkSync's ecosystem by ensuring accurate and comprehensive wallet screening, +supporting secure and transparent operations for users and businesses. + +## Dune Analytics + +[Dune Analytics](https://dune.xyz/docs) is a web-based platform tailored for querying public +blockchain data and aggregating it into visually appealing dashboards. With blockchain networks +being open and transparent yet unique, Dune Analytics provides the necessary tools for +cross-chain data analysis covering various tokens, wallets, and protocols. It also fosters a +community-centric environment by allowing users to share their analytical work effortlessly. + +## Zetta Blocks + +[Zetta Blocks](https://www.zettablock.com/) stands as an enterprise-grade, full-stack Web3 +infrastructure focusing on indexing and analytics while bridging on-chain and off-chain data. +It empowers developers to construct real-time, reliable GraphQL APIs via SQL swiftly, +eliminating the concerns of data processing on both frontends and backends. Zetta Blocks is an +epitome of how seamless monitoring and analytics can be integrated into the blockchain +development workflow. diff --git a/content/30.ecosystem/60.network-faucets.md b/content/30.ecosystem/60.network-faucets.md new file mode 100644 index 00000000..9b132c68 --- /dev/null +++ b/content/30.ecosystem/60.network-faucets.md @@ -0,0 +1,33 @@ +--- +title: Network Faucets +description: +--- + +To access the testnet funds (Sepolia) you can use one of the following third party faucets: + +## %%zk_testnet_name%% faucet by Chainstack + +[Chainstack Faucet](https://faucet.chainstack.com/zksync-testnet-faucet) is an easy to use +Multi-Chain Faucet. You can use Chainstack Faucet to claim %%zk_testnet_name%% **0.05 ETH +every 24 hours**. Chainstack API key is required. 
+ +## LearnWeb3 + +You can use [LearnWeb3's %%zk_testnet_name%%](https://learnweb3.io/faucets/zksync_sepolia/) +to claim %%zk_testnet_name%% **0.01 ETH per day**. GitHub authentication is required. + +## Sepolia faucets + +Use any of the following faucets to claim SepoliaETH, which you can bridge to %%zk_testnet_name%% +using the [zkSync Bridge](https://portal.zksync.io/bridge?network=sepolia). + +- [LearnWeb3 Sepolia faucet](https://learnweb3.io/faucets/sepolia) +- [QuickNode Sepolia faucet](https://faucet.quicknode.com/ethereum/sepolia) +- [Alchemy Sepolia faucet](https://sepoliafaucet.com/) +- [Proof of Work Sepolia faucet](https://sepolia-faucet.pk910.de/) +- [Infura Sepolia faucet](https://www.infura.io/faucet/sepolia/) +- [Ethereum Ecosystem Sepolia faucet](https://www.ethereum-ecosystem.com/faucets/ethereum-sepolia) + +## Sepolia USDC faucet + +You can use [Circle's Testnet Faucet](https://faucet.circle.com/) to claim testnet USDC on zkSync Sepolia or Ethereum Sepolia Testnet. diff --git a/content/30.ecosystem/70.nft-marketplaces.md b/content/30.ecosystem/70.nft-marketplaces.md new file mode 100644 index 00000000..13cf0ffd --- /dev/null +++ b/content/30.ecosystem/70.nft-marketplaces.md @@ -0,0 +1,48 @@ +--- +title: NFT Marketplaces +description: +--- + +The following NFT marketplaces are specialized in facilitating the trade, creation, and +management of NFTs. These platforms offer distinct features and benefits. + +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of marketplaces within the zkSync ecosystem, feel free to explore the +[marketplaces](https://zksync.dappradar.com/ecosystem?page=1&category=marketplaces) +category on Dappradar. +:: + +## Element + +[Element](https://element.market/) is a community-driven aggregated marketplace where you can +buy and sell NFTs across different platforms, save money, and earn rewards. 
+**Specialty**: Aggregated Marketplace, Rewards + +## Libera + +[Libera](https://libera.xyz/) is a native NFT marketplace built specifically for the +zkSync Era. +**Specialty**: zkSync Era Native + +## OKX NFT + +[OKX NFT](https://www.okx.com/web3/marketplace/nft) serves as a one-stop decentralized NFT +market, allowing you to create and trade NFTs across multiple blockchains and platforms. +**Specialty**: Multi-Blockchain, One-Stop Market + +## Tevaera + +[Tevaera](https://market.tevaera.com/) is the first paymasters and ONFT powered marketplace on +the zkSync Era. It offers the lowest transaction fees while being fully secured by the Ethereum consensus. +**Specialty**: zkSync Era, Low Fees, Paymasters and ONFT + +## zkMarkets + +[zkMarkets](https://www.zkmarkets.com/zksync-era) is a native NFT marketplace on zkSync, +supporting paymasters and Smart Wallets like Clave. It features a Launchpad, rarity tools, +and aggregated listings. +**Specialty**: Aggregated Marketplace, Paymasters, Smart Accounts, Rarity tools + +Choose a marketplace that aligns with your requirements, whether it's low fees, +multi-blockchain support, or specific zkSync Era functionalities. Always perform your own due +diligence before using any third-party platforms. diff --git a/content/30.ecosystem/80.oracles.md b/content/30.ecosystem/80.oracles.md new file mode 100644 index 00000000..5edfb689 --- /dev/null +++ b/content/30.ecosystem/80.oracles.md @@ -0,0 +1,49 @@ +--- +title: Oracles +description: +--- + +## Overview + +Welcome to the Oracles page, a comprehensive hub dedicated to interacting with oracle services +on zkSync Era. As the demand for decentralized applications continues, the need for reliable +and efficient oracle services becomes paramount. Within these sections, you'll unearth +specialized usage guides and tangible examples designed to facilitate seamless interactions +with a variety of different oracle services. 
+ +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of infrastructure options within the zkSync ecosystem, feel free to explore the +[infrastructure](https://zksync.dappradar.com/ecosystem?page=1&category=non_dapps_infrastructure) +category on Dappradar. +:: + +### Chainlink + +[Chainlink](https://docs.chain.link/data-feeds/price-feeds/addresses?network=zksync&page=1) private feeds provide +secure, reliable, and tamper-proof price data for your smart contracts. + +## DIA + +[DIA](https://docs.diadata.org/products/token-price-feeds) token price feeds provide smart +contract real-time price information of 3,000+ cryptocurrencies, transparently sourced from 80+ +trusted, high-volume DEXs and CEXs. Check out the usage guide below to get started today! + +### Gelato + +[Gelato](https://docs.gelato.network/web3-services/vrf/understanding-vrf) provides access to Verifiable Random +Functions (VRF) on zkSync. VRFs are cryptographic primitives that generate pseudorandom numbers in a way that is +provably secure and verifiable. A VRF allows a holder of a private key to produce a random number along with a proof +that the number was generated legitimately (making it publically verifiable). More information, including how to use +VRFs in your dApp, can be found in the Gelato docs. + +## Pyth + +[Pythnet](https://docs.pyth.network/price-feeds) price feeds use a "pull" +price update model, where users are responsible for posting price updates on-chain when needed. +Checkout the usage guide to get started today! + +## RedStone + +[RedStone](https://docs.redstone.finance/docs/introduction) delivers frequently updated, +reliable, and diverse data feeds for your dApp and smart contracts. Check out all the price +feeds available to zkSync Era and get started with the provided usage guide. 
diff --git a/content/30.ecosystem/90.node-providers.md b/content/30.ecosystem/90.node-providers.md new file mode 100644 index 00000000..66ace4f2 --- /dev/null +++ b/content/30.ecosystem/90.node-providers.md @@ -0,0 +1,80 @@ +--- +title: RPC Providers +description: +--- + +::callout{icon="i-heroicons-information-circle-16-solid" color="green"} +For an extended list of infrastructure options within the zkSync ecosystem, feel free to explore the +[infrastructure](https://zksync.dappradar.com/ecosystem?page=1&category=non_dapps_infrastructure) +category on Dappradar. +:: + +### Alchemy + +[Alchemy](https://www.alchemy.com/zksync) is a leading developer platform with powerful APIs, SDKs, and tools to build +truly scalable onchain apps. Deploy on zkSync Mainnet and zkSync Sepolia Testnet using Alchemy's free and [paid plans](https://www.alchemy.com/pricing). + +## Ankr + +[Ankr](https://www.ankr.com/rpc/zksync_era/) provides private and public RPC endpoints for +zkSync, powered by a globally distributed and decentralized network of nodes. They offer free +and [paid plans](https://www.ankr.com/rpc/pricing/) with increased request limits. + +## BlockPI + +[BlockPI](https://blockpi.io/zksync) is a high-quality, robust, and efficient RPC service +network that provides access to zkSync nodes with [free and paid plans](https://docs.blockpi.io/documentations/pricing). + +## Chainstack + +[Chainstack](https://chainstack.com/) is a leading blockchain infrastructure providing company. +Build, run and scale blockchain applications using Chainstack nodes. + +## Chainbase + +[Chainbase](https://chainbase.com/chainNetwork/zksync) API is supercharged for reliability, +data correctness, and scalability. Chainbase will handle all the forks, upgrades, and network interruptions + +## DRPC + +[DRPC](https://drpc.org/public-endpoints/zksync) offers access to distributed network of +independent third-party partners and public nodes for zkSync. 
They provide a free tier that
+allows for an unlimited amount of requests over public nodes, or a paid tier which provides
+access to all providers, as well as other additional features.
+
+## GetBlock
+
+[GetBlock](https://getblock.io/nodes/zksync/) provides access to a zkSync API endpoint for your
+project. With GetBlock you don’t need to know how to run zkSync nodes as they are already
+available for mainnet and testnets.
+
+## NOWNodes
+
+[NOWNodes](https://nownodes.io/nodes) provides Full Node for zkSync with a high-quality standard and 24/7 support. Free Plan is available.
+
+## QuickNode
+
+[QuickNode](https://www.quicknode.com/chains/zksync) offers access to hosted zkSync nodes as
+part of their free Discover Plan. You can configure add-ons, like "Trace Mode" and "Archive
+Mode" for an additional cost by upgrading to one of their paid plans.
+
+## Unifra
+
+[Unifra](https://unifra.io/) is a Web3 developer platform that provides tools, APIs, and node
+infrastructure, and provides access to zkSync nodes that are reliable, scalable, and
+easy to use. 
+ +### Public RPCs + +**Mainnet RPCs:** + +- [https://mainnet.era.zksync.io](https://mainnet.era.zksync.io) +- [https://zksync.drpc.org](https://zksync.drpc.org) +- [https://zksync.meowrpc.com](https://zksync.meowrpc.com) +- [https://zksync-era.blockpi.network/v1/rpc/public](https://zksync-era.blockpi.network/v1/rpc/public) +- [https://go.getblock.io/f76c09905def4618a34946bf71851542](https://go.getblock.io/f76c09905def4618a34946bf71851542) + +**Testnet RPCs:** + +- [https://sepolia.era.zksync.dev](https://sepolia.era.zksync.dev) +- [https://zksync-era-sepolia.blockpi.network/v1/rpc/public](https://zksync-era-sepolia.blockpi.network/v1/rpc/public) diff --git a/content/30.ecosystem/_dir.yml b/content/30.ecosystem/_dir.yml new file mode 100644 index 00000000..39691bd7 --- /dev/null +++ b/content/30.ecosystem/_dir.yml @@ -0,0 +1 @@ +title: Ecosystem diff --git a/content/_partials/_compile-solidity-contracts.md b/content/_partials/_compile-solidity-contracts.md new file mode 100644 index 00000000..27e6d4fa --- /dev/null +++ b/content/_partials/_compile-solidity-contracts.md @@ -0,0 +1,28 @@ +--- +title: Compile Solidity Contract +--- + +Smart contracts deployed to zkSync must be compiled using our custom compiler. +`zksolc` is the compiler used for Solidity. + +To compile the contracts in a project, run the following command: + +::code-group + +```bash [npm] +npm run compile +``` + +```bash [yarn] +yarn compile +``` + +```bash [pnpm] +pnpm run compile +``` + +```bash [bun] +bun run compile +``` + +:: diff --git a/content/_partials/_enable-remix-zksync-plugin.md b/content/_partials/_enable-remix-zksync-plugin.md new file mode 100644 index 00000000..34c05e01 --- /dev/null +++ b/content/_partials/_enable-remix-zksync-plugin.md @@ -0,0 +1,14 @@ +--- +title: Enable zkSync plugin in Remix +--- + +To deploy smart contracts to zkSync via Remix you need to enable the zkSync plugin. + +1. Visit [the Remix website](https://remix.ethereum.org/) +2. 
Click on the **“🔌 Plugin Manager”** button in the bottom-left corner +3. Search “zksync” and click on the **"Activate"** button. + +![Enable zkSync plugin in Remix](/images/enable-remix-plugin.gif) + +Once activated, you’ll see a new menu item with the zkSync logo. Click on it to see the different options to compile, +deploy, and interact with smart contracts on zkSync. diff --git a/content/_partials/_foundry-create-keystore.md b/content/_partials/_foundry-create-keystore.md new file mode 100644 index 00000000..e74b5b83 --- /dev/null +++ b/content/_partials/_foundry-create-keystore.md @@ -0,0 +1,28 @@ +--- +title: foundry create keystore +--- + +Follow these steps to securely store your wallet's private key to use it in Foundry projects: + +1. **Extract Your Private Key:** If you are using the local era node, use a private key from the available rich + accounts. Otherwise, find your personal wallet's private key. For MetaMask users, here's how to [export your wallet's + private key](https://support.metamask.io/hc/en-us/articles/360015289632-How-to-export-an-account-s-private-key). + +2. **Create a Foundry keystore:** Create a keystore and import your private key by running + +```bash +cast wallet import myKeystore --interactive +# enter your PK when prompted, provide a password, and copy the returned address +``` + +It'll return an address (keystore address). + +::callout{icon="i-heroicons-information-circle" color="blue"} +Note that the name `myKeystore` is arbitrary and can be updated. For our docs, we've chosen this name for consistency. +If you decide to use another name, be sure to reference it when using `cast`. +:: + +#### Using the keystore + +When running commands that require a private key, like `forge create` or `cast send`, use `--account myKeystore --sender <KEYSTORE_ADDRESS>`. This will +require you to enter the keystore password you provided before. 
diff --git a/content/_partials/_foundry_alpha_warning.md b/content/_partials/_foundry_alpha_warning.md new file mode 100644 index 00000000..0ba84594 --- /dev/null +++ b/content/_partials/_foundry_alpha_warning.md @@ -0,0 +1,8 @@ +--- +title: foundry-zksync alpha warning +--- + +::callout{icon="i-heroicons-information-circle-16-solid" color="amber"} +`foundry-zksync` is still in an alpha stage, so some features might not be fully supported +yet and may not work as fully intended. It is open-sourced and contributions are welcomed. +:: diff --git a/content/_partials/_mainnet-network-details.md b/content/_partials/_mainnet-network-details.md new file mode 100644 index 00000000..743993bc --- /dev/null +++ b/content/_partials/_mainnet-network-details.md @@ -0,0 +1,10 @@ +--- +title: Mainnet Network Details +--- + +- Network Name: `%%zk_mainnet_name%%` +- RPC URL: `%%zk_mainnet_rpc_url%%` +- Chain ID: `%%zk_mainnet_chain_id%%` +- Currency Symbol: `%%zk_mainnet_currency_symbol%%` +- Block Explorer URL: `%%zk_mainnet_block_explorer_url%%` +- WebSocket URL: `%%zk_mainnet_websocket_url%%` diff --git a/content/_partials/_rich-wallets.md b/content/_partials/_rich-wallets.md new file mode 100644 index 00000000..99fc2fff --- /dev/null +++ b/content/_partials/_rich-wallets.md @@ -0,0 +1,44 @@ +--- +title: Rich Wallets +github: https://github.com/matter-labs/local-setup/blob/main/rich-wallets.json +--- + +- **Address:** `0x36615Cf349d7F6344891B1e7CA7C72883F5dc049` + + **Private Key:** `0x7726827caac94a7f9e1b160f7ea819f172f7b6f9d2a97f992c38edeab82d4110` + +- **Address:** `0xa61464658AfeAf65CccaaFD3a512b69A83B77618` + + **Private Key:** `0xac1e735be8536c6534bb4f17f06f6afc73b2b5ba84ac2cfb12f7461b20c0bbe3` + +- **Address:** `0x0D43eB5B8a47bA8900d84AA36656c92024e9772e` + + **Private Key:** `0xd293c684d884d56f8d6abd64fc76757d3664904e309a0645baf8522ab6366d9e` + +- **Address:** `0xA13c10C0D5bd6f79041B9835c63f91de35A15883` + + **Private Key:** 
`0x850683b40d4a740aa6e745f889a6fdc8327be76e122f5aba645a5b02d0248db8` + +- **Address:** `0x8002cD98Cfb563492A6fB3E7C8243b7B9Ad4cc92` + + **Private Key:** `0xf12e28c0eb1ef4ff90478f6805b68d63737b7f33abfa091601140805da450d93` + +- **Address:** `0x4F9133D1d3F50011A6859807C837bdCB31Aaab13` + + **Private Key:** `0xe667e57a9b8aaa6709e51ff7d093f1c5b73b63f9987e4ab4aa9a5c699e024ee8` + +- **Address:** `0xbd29A1B981925B94eEc5c4F1125AF02a2Ec4d1cA` + + **Private Key:** `0x28a574ab2de8a00364d5dd4b07c4f2f574ef7fcc2a86a197f65abaec836d1959` + +- **Address:** `0xedB6F5B4aab3dD95C7806Af42881FF12BE7e9daa` + + **Private Key:** `0x74d8b3a188f7260f67698eb44da07397a298df5427df681ef68c45b34b61f998` + +- **Address:** `0xe706e60ab5Dc512C36A4646D719b889F398cbBcB` + + **Private Key:** `0xbe79721778b48bcc679b78edac0ce48306a8578186ffcb9f2ee455ae6efeace1` + +- **Address:** `0xE90E12261CCb0F3F7976Ae611A29e84a6A85f424` + + **Private Key:** `0x3eb15da85647edd9a1159a4a13b9e7c56877c4eb33f614546d4db06a51868b1c` diff --git a/content/_partials/_testnet-network-details.md b/content/_partials/_testnet-network-details.md new file mode 100644 index 00000000..ae1307c8 --- /dev/null +++ b/content/_partials/_testnet-network-details.md @@ -0,0 +1,10 @@ +--- +title: Testnet Network Details +--- + +- Network Name: `%%zk_testnet_name%%` +- RPC URL: `%%zk_testnet_rpc_url%%` +- Chain ID: `%%zk_testnet_chain_id%%` +- Currency Symbol: `%%zk_testnet_currency_symbol%%` +- Block Explorer URL: `%%zk_testnet_block_explorer_url%%` +- WebSocket URL: `%%zk_testnet_websocket_url%%` diff --git a/content/index.yml b/content/index.yml new file mode 100644 index 00000000..da53d041 --- /dev/null +++ b/content/index.yml @@ -0,0 +1,45 @@ +title: 'zkSync Docs' +description: + zkSync Docs bring you all information you need about our protocol, APIs, SDKs, ZK Stack, and ZK Chains. Start with our + guides and tutorials, or go deep into our architecture and protocol specification. 
+navigation: false +hero: + title: 'Unlock the Potential of Layer 2 Scaling with zkSync' + description: 'Explore comprehensive guides, developer tools, and resources to innovate on zkSync.' + orientation: vertical + links: + - label: Start building on zkSync + icon: i-heroicons-arrow-right-20-solid + trailing: true + to: '/build' + size: xl +features: + title: 'Explore zkSync Docs' + items: + - title: 'Getting Started with zkSync' + description: 'Jumpstart your zkSync journey with quickstart guides and fundamental concepts for developers.' + icon: 'i-zksync-zksync-logo' + to: '/build/quick-start' + - title: 'Develop with zksync-cli' + description: 'Boost your development workflow with the zksync-cli tool.' + icon: 'i-simple-icons-windowsterminal' + to: '/build/tooling/zksync-cli' + - title: 'Architecture' + description: 'Learn about the zkSync architecture and how it works under the hood.' + icon: 'i-heroicons-sparkles-20-solid' + to: '/zk-stack' +community: + title: 'Join the zkSync Community' + items: + - title: 'Developer Updates' + description: 'Keep up to date with the latest from the zkSync team on X.' + icon: 'i-simple-icons-x' + to: 'https://x.com/zksyncDevs' + - title: 'GitHub Discussions' + description: 'Get help from the community and contribute to the zkSync project.' + icon: 'i-simple-icons-github' + to: 'https://github.com/zkSync-Community-Hub/zksync-developers/discussions' + - title: 'zkSync Discord' + description: 'Connect with devs and zkSync enthusiasts on Discord.' 
+ icon: 'i-simple-icons-discord' + to: 'https://join.zksync.dev/' diff --git a/cspell-config/cspell-blockchain.txt b/cspell-config/cspell-blockchain.txt new file mode 100644 index 00000000..9bfb4a81 --- /dev/null +++ b/cspell-config/cspell-blockchain.txt @@ -0,0 +1,56 @@ +Aave +arithmetization +BoxUups +Buterin +Dappradar +DRPC +Echoo +Eigen +Enkrypt +ethereum +EVM +evmla +geth +gwei +!HardHat +IERC +inversed +keccak +Kreatorland +merkle +Merklized +mload +nomicfoundation +nomiclabs +Omnibtc +omnichain +ONFT +Owlto +Plonky +poseidon +PREVRANDAO +Pyth +Pythnet +Rabby +satoshi +sepolia +Sepolia +solc +Tevaera +Unifra +Upgradability +uups +UUPS +validium +validiums +viem +Vitalik +vyper +Weth +YaspFi +Yul +Zaverucha +Zeeve +Zerion +Zetta +Zonic diff --git a/cspell-config/cspell-dev.txt b/cspell-config/cspell-dev.txt new file mode 100644 index 00000000..dc15481c --- /dev/null +++ b/cspell-config/cspell-dev.txt @@ -0,0 +1,103 @@ +!NuxtContent +!NuxtUI +!tesnet +.getu128 +.interm +.uadd. +.umin. +Ankr +ansible +binop +blake2s +blake2s256 +cccond +ccret +CHAINID +debian +decommit +decommitment +decommitments +decommits +decommitter +decommitting +decommittment +decommittments +deduplicator +demultiplex +Demuxer +devs +Diataxis +dockerized +Dockerized +dutterbutter +ecadd +ecmul +ewasm +fontaine +Gbps +Gelato +gtlt +hexlify +insize +inttoptr +iszero +jemalloc +JUMPI +legendre +librocksdb +mload +montgomery +MontInvbEEA +mstore +noalias +nocallback +nocapture +nomicfoundation +noprofile +nosync +nuxi +nuxt +Nuxt +nuxtdotjs +nuxtjs +NVMe +NVME +OTLP +plux +postgres +postgresql +preds +prode +prodeh +prodh +prodl +prodm +psql +ptrtoint +Rabby +recid. 
+REDC +rocksdb +rodata +rustc +rustup +secp +signv +sload +smod +sqlx-cli +sstore +syncvm +tlsv1 +tokio +viem +Viem +VRFs +vue +Vue +Zerion +zext +ZKEVM +zkout +zksolc +zkvyper diff --git a/cspell-config/cspell-misc.txt b/cspell-config/cspell-misc.txt new file mode 100644 index 00000000..734aec30 --- /dev/null +++ b/cspell-config/cspell-misc.txt @@ -0,0 +1,14 @@ +Code4rena +Consensys +Cyfrin +GRVT +Hola +Icones +Immunefi +initializable +Initializable +Lukka +mathbb +mundo +permissioned +Winternitz diff --git a/cspell-config/cspell-zksync.txt b/cspell-config/cspell-zksync.txt new file mode 100644 index 00000000..fa6433fd --- /dev/null +++ b/cspell-config/cspell-zksync.txt @@ -0,0 +1,16 @@ +// zkSync-related words +boojum +!MatterLabs +Matter Labs +Zeek +Zeeks +zkcast +ZKEVM +zkevm +zkforge +zkout +zksolc +zkstack +zksync +zksync-cli +zkvyper diff --git a/cspell.json b/cspell.json new file mode 100644 index 00000000..7dc02790 --- /dev/null +++ b/cspell.json @@ -0,0 +1,70 @@ +{ + "language": "en", + "ignorePaths": [ + "bun.lockb", + "*.css", + "*.config.*", + "node_modules/**", + ".*/**", + "dist/**", + "cspell-config/**", + "package.json" + ], + "caseSensitive": true, + "dictionaries": [ + "bash", + "cpp", + "cryptocurrencies", + "docker", + "css", + "csharp", + "en_GB", + "en_US", + "filetypes", + "fullstack", + "git", + "golang", + "go", + "html", + "java", + "latext", + "misc", + "node", + "npm", + "npm", + "nuxt", + "python", + "rust", + "softwareTerms", + "swift", + "typescript", + "dict-zksync", + "dict-blockchain", + "dict-dev", + "dict-misc" + ], + "dictionaryDefinitions": [ + { + "name": "dict-zksync", + "addWords": true, + "path": "./cspell-config/cspell-zksync.txt" + }, + { + "name": "dict-blockchain", + "addWords": true, + "path": "./cspell-config/cspell-blockchain.txt" + }, + { + "name": "dict-dev", + "addWords": true, + "path": "./cspell-config/cspell-dev.txt" + }, + { + "name": "dict-misc", + "addWords": true, + "path": 
"./cspell-config/cspell-misc.txt" + } + ], + "allowCompoundWords": true, + "flagWords": ["hte", "hve", "teh", "cna"] +} diff --git a/error.vue b/error.vue new file mode 100644 index 00000000..9db3021a --- /dev/null +++ b/error.vue @@ -0,0 +1,55 @@ +<script setup lang="ts"> +import type { NuxtError } from '#app'; +import type { ParsedContent } from '@nuxt/content/dist/runtime/types'; + +useSeoMeta({ + title: 'Page not found', + description: 'We are sorry but this page could not be found.', +}); + +defineProps({ + error: { + type: Object as PropType<NuxtError>, + required: true, + }, +}); + +useHead({ + htmlAttrs: { + lang: 'en', + }, +}); + +const { data: navigation } = await useAsyncData('navigation', () => fetchContentNavigation()); +const { data: files } = useLazyFetch<ParsedContent[]>('/api/search.json', { + default: () => [], + server: false, +}); + +provide('navigation', navigation); +</script> + +<template> + <div> + <HeaderComponent :search="true" /> + + <UMain> + <UContainer> + <UPage> + <UPageError :error="error" /> + </UPage> + </UContainer> + </UMain> + + <FooterComponent /> + + <ClientOnly> + <LazyUContentSearch + :files="files" + :navigation="navigation" + /> + </ClientOnly> + + <UNotifications /> + </div> +</template> diff --git a/eslint.config.mjs b/eslint.config.mjs new file mode 100644 index 00000000..00aa3d9c --- /dev/null +++ b/eslint.config.mjs @@ -0,0 +1,18 @@ +// @ts-check +import withNuxt from './.nuxt/eslint.config.mjs'; + +export default withNuxt({ + ignores: ['*.d.ts', '**/node_modules/**', '**/build/**', '**/dist/**', '**/.*/**', '**/coverage/**'], + rules: { + 'vue/html-self-closing': [ + 'error', + { + html: { + void: 'always', + normal: 'always', + component: 'always', + }, + }, + ], + }, +}); diff --git a/firebase.json b/firebase.json new file mode 100644 index 00000000..d132b093 --- /dev/null +++ b/firebase.json @@ -0,0 +1,283 @@ +{ + "hosting": { + "public": ".output/public", + "ignore": ["firebase.json", "**/.*", 
"**/node_modules/**"], + "redirects": [ + { + "source": "/sdks/js/:path*.@(html|md)", + "destination": "/sdk/js/ethers/v5/:path", + "type": 301 + }, + { + "source": "/sdks/js/zksync-ethers/:path*.@(html|md)", + "destination": "/sdk/js/ethers/v6/:path", + "type": 301 + }, + { + "source": "/sdks/:path*.@(html|md)", + "destination": "/sdk/:path", + "type": 301 + }, + { + "source": "/build/quick-start/*.@(html|md)", + "destination": "/build/quick-start", + "type": 301 + }, + { + "source": "/build/api.html", + "destination": "/build/api-reference", + "type": 301 + }, + { + "source": "/build/tooling/zksync-cli/getting-started.html", + "destination": "/build/tooling/zksync-cli", + "type": 301 + }, + { + "source": "/build/tooling/network-faucets.html", + "destination": "/ecosystem/network-faucets", + "type": 301 + }, + { + "source": "/build/tooling/hardhat/getting-started.html", + "destination": "/build/tooling/hardhat/getting-started", + "type": 301 + }, + { + "source": "/build/tooling/bridges.html", + "destination": "/ecosystem/bridges", + "type": 301 + }, + { + "source": "/build/tooling/cross-chain.html", + "destination": "/ecosystem/cross-chain", + "type": 301 + }, + { + "source": "/build/tooling/data-indexers.html", + "destination": "/ecosystem/data-indexers", + "type": 301 + }, + { + "source": "/build/tooling/monitoring.html", + "destination": "/ecosystem/monitoring", + "type": 301 + }, + { + "source": "/build/tooling/network-faucets.html", + "destination": "/ecosystem/network-faucets", + "type": 301 + }, + { + "source": "/build/tooling/node-providers.html", + "destination": "/ecosystem/node-providers", + "type": 301 + }, + { + "source": "/build/tooling/oracles.html", + "destination": "/ecosystem/oracles", + "type": 301 + }, + { + "source": "/build/tooling/wallets.html", + "destination": "/ecosystem/wallets", + "type": 301 + }, + { + "source": "/build/tooling/nft-marketplaces.html", + "destination": "/ecosystem/nft-marketplaces", + "type": 301 + }, + { + "source": 
"/build/tooling/ide.html", + "destination": "/ecosystem/ide", + "type": 301 + }, + { + "source": "/build/tooling/block-explorer/*.@(html|md)", + "destination": "/build/tooling/zksync-block-explorers", + "type": 301 + }, + { + "source": "/build/tooling/hardhat/*.@(html|md)", + "destination": "/build/tooling/hardhat/getting-started", + "type": 301 + }, + { + "source": "/build/tooling/foundry/*.@(html|md)", + "destination": "/build/tooling/foundry/overview", + "type": 301 + }, + { + "source": "/build/test-and-debug/getting-started.html", + "destination": "/build/test-and-debug", + "type": 301 + }, + { + "source": "/build/test-and-debug/era-test-node.html", + "destination": "/build/test-and-debug/in-memory-node", + "type": 301 + }, + { + "source": "/build/test-and-debug/:page.@(html|md)", + "destination": "/build/test-and-debug/:page", + "type": 301 + }, + { + "source": "/build/sdks/js{,/**}", + "destination": "/sdk/js/ethers/v6/getting-started", + "type": 301 + }, + { + "source": "/zk-stack/concepts/zk-chains.html", + "destination": "/zk-stack/concepts/zk-chains", + "type": 301 + }, + { + "source": "/build/developer-reference/rollups.html", + "destination": "/build/developer-reference/intro-rollups", + "type": 301 + }, + { + "source": "/build/developer-reference/differences-with-ethereum.html", + "destination": "/build/developer-reference/ethereum-differences/evm-instructions", + "type": 301 + }, + { + "source": "/build/developer-reference/account-abstraction.html", + "destination": "/build/developer-reference/account-abstraction", + "type": 301 + }, + { + "source": "/build/developer-reference/system-contracts.html", + "destination": "/build/developer-reference/era-contracts/l1-contracts", + "type": 301 + }, + { + "source": "/build/developer-reference/*.@(html|md)", + "destination": "/build/developer-reference", + "type": 301 + }, + { + "source": "/build/support/:post*.@(html|md)", + "destination": "/build/resources/:post", + "type": 301 + }, + { + "source": 
"/build/tutorials/smart-contract-development/paymasters/custom-paymaster-tutorial.html", + "destination": "https://code.zksync.io/tutorials/erc20-paymaster", + "type": 301 + }, + { + "source": "/build/tutorials/dapp-development/frontend-quickstart-paymaster.html", + "destination": "https://code.zksync.io/tutorials/frontend-paymaster", + "type": 301 + }, + { + "source": "/build/tutorials/dapp-development/gated-nft-paymaster-tutorial.html", + "destination": "https://code.zksync.io/tutorials/dapp-nft-paymaster", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/deposit-eth-to-l2.html", + "destination": "https://code.zksync.io/tutorials/how-to-deposit-eth", + "type": 301 + }, + { + "source": "/build/tutorials/smart-contract-development/account-abstraction/daily-spend-limit.html", + "destination": "https://code.zksync.io/tutorials/daily-spend-limit-account", + "type": 301 + }, + { + "source": "/build/tutorials/smart-contract-development/paymasters/gasless.html", + "destination": "https://code.zksync.io/tutorials/dapp-nft-paymaster", + "type": 301 + }, + { + "source": "/build/tutorials/smart-contract-development/cross-chain-tutorial.html", + "destination": "https://code.zksync.io/tutorials/cross-chain-governance", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/verify-contracts.html", + "destination": "https://code.zksync.io/tutorials/how-to-verify-contracts", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/withdraw-eth-to-l1.html", + "destination": "https://code.zksync.io/tutorials/how-to-withdraw-eth", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/deposit-erc-20-to-l2.html", + "destination": "https://code.zksync.io/tutorials/how-to-deposit-erc20", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/withdraw-erc-20-to-l1.html", + "destination": "https://code.zksync.io/tutorials/how-to-withdraw-erc20", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/send-message-l2-l1.html", + "destination": 
"https://code.zksync.io/tutorials/how-to-send-l2-l1-message", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/deploy-contract.html", + "destination": "https://code.zksync.io/tutorials/how-to-deploy-contract", + "type": 301 + }, + { + "source": "/build/tutorials/how-to/test-contracts.html", + "destination": "https://code.zksync.io/tutorials/how-to-test-contracts", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/wallet-connect.html", + "destination": "https://code.zksync.io/tutorials/guide-walletconnect", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/wagmi.html", + "destination": "https://code.zksync.io/tutorials/guide-wagmi", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/viem.html", + "destination": "https://code.zksync.io/tutorials/guide-viem", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/web3js.html", + "destination": "https://code.zksync.io/tutorials/guide-web3js", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/the-graph.html", + "destination": "https://code.zksync.io/tutorials/guide-the-graph", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/subquery.html", + "destination": "https://code.zksync.io/tutorials/guide-subquery", + "type": 301 + }, + { + "source": "/build/tutorials/tooling-guides/dipdup.html", + "destination": "https://code.zksync.io/tutorials/guide-dipdup-indexer", + "type": 301 + }, + { + "source": "/build/tutorials{,/**}", + "destination": "https://code.zksync.io/tutorials", + "type": 301 + }, + { + "source": "/build/**/*.@(html|md)", + "destination": "/build", + "type": 301 + } + ] + } +} diff --git a/layouts/build-section.vue b/layouts/build-section.vue new file mode 100644 index 00000000..59917c55 --- /dev/null +++ b/layouts/build-section.vue @@ -0,0 +1,23 @@ +<script setup lang="ts"> +import type { NavItem } from '@nuxt/content/types'; + +const navigation = inject<Ref<NavItem[]>>('navigation', ref([])); 
+const navTree = computed(() => navigation!.value.find((nav) => nav._path === '/build')?.children || []); +</script> + +<template> + <UContainer> + <UPage> + <template #left> + <UAside> + <UNavigationTree + :links="mapContentNavigation(navTree)" + default-open + :multiple="false" + /> + </UAside> + </template> + <slot /> + </UPage> + </UContainer> +</template> diff --git a/layouts/default.vue b/layouts/default.vue new file mode 100644 index 00000000..56a8b72d --- /dev/null +++ b/layouts/default.vue @@ -0,0 +1,5 @@ +<template> + <div> + <slot /> + </div> +</template> diff --git a/layouts/ecosystem-section.vue b/layouts/ecosystem-section.vue new file mode 100644 index 00000000..1af0c399 --- /dev/null +++ b/layouts/ecosystem-section.vue @@ -0,0 +1,23 @@ +<script setup lang="ts"> +import type { NavItem } from '@nuxt/content/types'; + +const navigation = inject<Ref<NavItem[]>>('navigation', ref([])); +const navTree = computed(() => navigation!.value.find((nav) => nav._path === '/ecosystem')?.children || []); +</script> + +<template> + <UContainer> + <UPage> + <template #left> + <UAside> + <UNavigationTree + :links="mapContentNavigation(navTree)" + default-open + :multiple="false" + /> + </UAside> + </template> + <slot /> + </UPage> + </UContainer> +</template> diff --git a/layouts/zk-stack-section.vue b/layouts/zk-stack-section.vue new file mode 100644 index 00000000..5e7f7ff9 --- /dev/null +++ b/layouts/zk-stack-section.vue @@ -0,0 +1,23 @@ +<script setup lang="ts"> +import type { NavItem } from '@nuxt/content/types'; + +const navigation = inject<Ref<NavItem[]>>('navigation', ref([])); +const navTree = computed(() => navigation!.value.find((nav) => nav._path === '/zk-stack')?.children || []); +</script> + +<template> + <UContainer> + <UPage> + <template #left> + <UAside> + <UNavigationTree + :links="mapContentNavigation(navTree)" + default-open + :multiple="false" + /> + </UAside> + </template> + <slot /> + </UPage> + </UContainer> +</template> diff --git 
a/layouts/zksync-node-section.vue b/layouts/zksync-node-section.vue new file mode 100644 index 00000000..6d0f8e96 --- /dev/null +++ b/layouts/zksync-node-section.vue @@ -0,0 +1,23 @@ +<script setup lang="ts"> +import type { NavItem } from '@nuxt/content/types'; + +const navigation = inject<Ref<NavItem[]>>('navigation', ref([])); +const navTree = computed(() => navigation!.value.find((nav) => nav._path === '/zksync-node')?.children || []); +</script> + +<template> + <UContainer> + <UPage> + <template #left> + <UAside> + <UNavigationTree + :links="mapContentNavigation(navTree)" + default-open + :multiple="false" + /> + </UAside> + </template> + <slot /> + </UPage> + </UContainer> +</template> diff --git a/nuxt.config.ts b/nuxt.config.ts new file mode 100644 index 00000000..34b73f3e --- /dev/null +++ b/nuxt.config.ts @@ -0,0 +1,45 @@ +// https://nuxt.com/docs/api/configuration/nuxt-config +export default defineNuxtConfig({ + extends: [['github:matter-labs/docs-nuxt-template', { install: true }], ['github:zksync-sdk/sdk-docs']], + modules: ['@nuxt/content', '@nuxt/ui', '@nuxt/eslint', '@nuxtjs/seo', 'nuxt-gtag'], + site: { + name: 'zkSync Docs', + url: process.env.NUXT_SITE_ENV ? 
'https://staging-docs.zksync.io' : 'https://docs.zksync.io', + }, + content: { + sources: { + sdk: { + prefix: '/sdk', + driver: 'github', + repo: 'zksync-sdk/sdk-docs', + branch: 'main', + dir: 'content/sdk', + }, + }, + }, + runtimeConfig: { + public: { + app: 'docs', + }, + }, + routeRules: { + '/api/search.json': { prerender: true }, + '/build/tooling': { redirect: '/build/tooling/zksync-block-explorers' }, + '/build/tooling/hardhat': { redirect: '/build/tooling/hardhat/getting-started' }, + '/build/tooling/foundry': { redirect: '/build/tooling/foundry/overview' }, + '/build/developer-reference/ethereum-differences': { + redirect: '/build/developer-reference/ethereum-differences/evm-instructions', + }, + '/build/developer-reference/era-contracts': { redirect: '/build/developer-reference/era-contracts/l1-contracts' }, + '/build/resources': { redirect: '/build/resources/glossary' }, + '/zk-stack/concepts': { redirect: '/zk-stack/concepts/transaction-lifecycle' }, + '/zk-stack/running-a-zk-chain': { redirect: '/zk-stack/running-a-zk-chain/locally' }, + }, + $production: process.env.NUXT_SITE_ENV + ? 
{} + : { + gtag: { + id: 'G-ELFWXSL45V', + }, + }, +}); diff --git a/package.json b/package.json new file mode 100644 index 00000000..a8067404 --- /dev/null +++ b/package.json @@ -0,0 +1,56 @@ +{ + "name": "zksync-docs", + "type": "module", + "description": "zkSync Docs V3", + "version": "0.0.0", + "license": "MIT", + "devDependencies": { + "@commitlint/cli": "^19.3.0", + "@commitlint/config-conventional": "^19.2.2", + "@types/bun": "^1.0.8", + "@vue/test-utils": "^2.4.5", + "cspell": "^8.6.0", + "eslint": "^8.52.0", + "husky": "^9.0.11", + "lint-staged": "^15.2.4", + "markdownlint": "^0.33.0", + "markdownlint-cli2": "^0.12.1", + "particles.vue3": "^2.12.0", + "prettier": "^3.1.0", + "prettier-plugin-tailwindcss": "^0.5.12", + "vue-tsc": "^2.0.6" + }, + "peerDependencies": { + "typescript": "^5.0.0" + }, + "dependencies": { + "@nuxt/content": "^2.12.1", + "@nuxt/eslint": "^0.3.12", + "@nuxt/fonts": "^0.3.0", + "@nuxt/image": "^1.6.0", + "@nuxt/ui-pro": "^1.0.2", + "@nuxtjs/seo": "^2.0.0-rc.10", + "nuxt-headlessui": "^1.2.0", + "@tsparticles/slim": "^3.3.0", + "@tsparticles/vue3": "^3.0.1", + "nuxt": "^3.11.2", + "nuxt-gtag": "^2.0.6", + "nuxt-og-image": "^3.0.0-rc.45", + "rehype-katex": "^7.0.0", + "remark-math": "^6.0.0", + "vue-easy-lightbox": "^1.19.0" + }, + "scripts": { + "build": "nuxt generate", + "dev": "nuxt dev", + "preview": "nuxt preview", + "lint:spelling": "cspell **/*.md --config=./cspell.json", + "lint:markdown": "markdownlint-cli2 \"content/**/*.md\" --config \".markdownlint.json\"", + "lint:eslint": "eslint .", + "lint:prettier": "prettier --check .", + "fix:prettier": "prettier --write .", + "prepare": "node .husky/install.mjs", + "postinstall": "nuxt prepare", + "ci:check": "bun run lint:eslint && bun run lint:prettier && bun run lint:spelling && bun run lint:markdown" + } +} diff --git a/pages/build/[...slug].vue b/pages/build/[...slug].vue new file mode 100644 index 00000000..02fb99c0 --- /dev/null +++ b/pages/build/[...slug].vue @@ -0,0 +1,9 
@@ +<template> + <LayoutDocsBase /> +</template> + +<script setup lang="ts"> +definePageMeta({ + layout: 'build-section', +}); +</script> diff --git a/pages/ecosystem/[...slug].vue b/pages/ecosystem/[...slug].vue new file mode 100644 index 00000000..14ffe77b --- /dev/null +++ b/pages/ecosystem/[...slug].vue @@ -0,0 +1,9 @@ +<template> + <LayoutDocsBase /> +</template> + +<script setup lang="ts"> +definePageMeta({ + layout: 'ecosystem-section', +}); +</script> diff --git a/pages/index.vue b/pages/index.vue new file mode 100644 index 00000000..0b527955 --- /dev/null +++ b/pages/index.vue @@ -0,0 +1,87 @@ +<script setup lang="ts"> +const { data: page } = await useAsyncData('index', () => queryContent('/').findOne()); + +useSeoMeta({ + titleTemplate: '', + title: page.value?.title, + ogTitle: page.value?.title, + description: page.value?.description, + ogDescription: page.value?.description, +}); +</script> + +<template> + <div v-if="page"> + <ULandingHero + v-if="page.hero" + v-bind="page.hero" + > + <ClientOnly> + <vue-particles + id="tsparticles" + url="particles.json" + /> + </ClientOnly> + <template #headline> + <UBadge + v-if="page.hero.headline" + variant="subtle" + size="lg" + class="relative rounded-full font-semibold" + > + <NuxtLink + :to="page.hero.headline.to" + target="_blank" + class="focus:outline-none" + tabindex="-1" + > + <span + class="absolute inset-0" + aria-hidden="true" + /> + </NuxtLink> + + {{ page.hero.headline.label }} + + <UIcon + v-if="page.hero.headline.icon" + :name="page.hero.headline.icon" + class="pointer-events-none ml-1 h-4 w-4" + /> + </UBadge> + </template> + + <template #title> + <MDC :value="page.hero.title" /> + </template> + </ULandingHero> + + <ULandingSection + :title="page.features.title" + :links="page.features.links" + class="py-16 sm:py-12" + > + <UPageGrid> + <ULandingCard + v-for="(item, index) of page.features.items" + :key="index" + v-bind="item" + /> + </UPageGrid> + </ULandingSection> + + <ULandingSection + 
:title="page.community.title" + :links="page.community.links" + class="py-16 pb-32 sm:py-12 sm:pb-24" + > + <UPageGrid> + <ULandingCard + v-for="(item, index) of page.community.items" + :key="index" + v-bind="item" + /> + </UPageGrid> + </ULandingSection> + </div> +</template> diff --git a/pages/zk-stack/[...slug].vue b/pages/zk-stack/[...slug].vue new file mode 100644 index 00000000..a87a8ff1 --- /dev/null +++ b/pages/zk-stack/[...slug].vue @@ -0,0 +1,9 @@ +<template> + <LayoutDocsBase /> +</template> + +<script setup lang="ts"> +definePageMeta({ + layout: 'zk-stack-section', +}); +</script> diff --git a/pages/zksync-node/[...slug].vue b/pages/zksync-node/[...slug].vue new file mode 100644 index 00000000..a6b871c7 --- /dev/null +++ b/pages/zksync-node/[...slug].vue @@ -0,0 +1,9 @@ +<template> + <LayoutDocsBase /> +</template> + +<script setup lang="ts"> +definePageMeta({ + layout: 'zksync-node-section', +}); +</script> diff --git a/plugins/particles.client.ts b/plugins/particles.client.ts new file mode 100644 index 00000000..a94c0c32 --- /dev/null +++ b/plugins/particles.client.ts @@ -0,0 +1,10 @@ +import Particles from '@tsparticles/vue3'; +import { loadSlim } from '@tsparticles/slim'; + +export default defineNuxtPlugin((nuxtApp) => { + nuxtApp.vueApp.use(Particles, { + init: async (engine) => { + await loadSlim(engine); + }, + }); +}); diff --git a/public/favicon-16x16.png b/public/favicon-16x16.png new file mode 100644 index 00000000..3f32357c Binary files /dev/null and b/public/favicon-16x16.png differ diff --git a/public/favicon-32x32.png b/public/favicon-32x32.png new file mode 100644 index 00000000..08d65d03 Binary files /dev/null and b/public/favicon-32x32.png differ diff --git a/public/favicon.ico b/public/favicon.ico new file mode 100644 index 00000000..a8ff125c Binary files /dev/null and b/public/favicon.ico differ diff --git a/public/images/101-erc20/atlas-deploy-erc20.png b/public/images/101-erc20/atlas-deploy-erc20.png new file mode 100644 index 
00000000..5b44ef4a Binary files /dev/null and b/public/images/101-erc20/atlas-deploy-erc20.png differ diff --git a/public/images/101-erc20/atlas-erc20-interact.png b/public/images/101-erc20/atlas-erc20-interact.png new file mode 100644 index 00000000..a1e251ac Binary files /dev/null and b/public/images/101-erc20/atlas-erc20-interact.png differ diff --git a/public/images/101-erc20/erc20-tokens-minted.png b/public/images/101-erc20/erc20-tokens-minted.png new file mode 100644 index 00000000..c9a90746 Binary files /dev/null and b/public/images/101-erc20/erc20-tokens-minted.png differ diff --git a/public/images/101-erc20/remix-erc20-interact.png b/public/images/101-erc20/remix-erc20-interact.png new file mode 100644 index 00000000..c085f6bc Binary files /dev/null and b/public/images/101-erc20/remix-erc20-interact.png differ diff --git a/public/images/101-paymasters/atlas-paymaster-script.png b/public/images/101-paymasters/atlas-paymaster-script.png new file mode 100644 index 00000000..e6c189f0 Binary files /dev/null and b/public/images/101-paymasters/atlas-paymaster-script.png differ diff --git a/public/images/101-paymasters/zksync-paymaster.png b/public/images/101-paymasters/zksync-paymaster.png new file mode 100644 index 00000000..c9fc4b20 Binary files /dev/null and b/public/images/101-paymasters/zksync-paymaster.png differ diff --git a/public/images/101-quickstart/101-atlas-contract.png b/public/images/101-quickstart/101-atlas-contract.png new file mode 100644 index 00000000..e61a8642 Binary files /dev/null and b/public/images/101-quickstart/101-atlas-contract.png differ diff --git a/public/images/101-quickstart/101-atlas-deployed.png b/public/images/101-quickstart/101-atlas-deployed.png new file mode 100644 index 00000000..52202ca7 Binary files /dev/null and b/public/images/101-quickstart/101-atlas-deployed.png differ diff --git a/public/images/101-quickstart/101-contract-deployed.png b/public/images/101-quickstart/101-contract-deployed.png new file mode 100644 
index 00000000..3eebc725 Binary files /dev/null and b/public/images/101-quickstart/101-contract-deployed.png differ diff --git a/public/images/101-quickstart/101-contract-events.png b/public/images/101-quickstart/101-contract-events.png new file mode 100644 index 00000000..061598b3 Binary files /dev/null and b/public/images/101-quickstart/101-contract-events.png differ diff --git a/public/images/101-quickstart/101-remix-deploy.png b/public/images/101-quickstart/101-remix-deploy.png new file mode 100644 index 00000000..a524831c Binary files /dev/null and b/public/images/101-quickstart/101-remix-deploy.png differ diff --git a/public/images/101-quickstart/101-remix-interact.png b/public/images/101-quickstart/101-remix-interact.png new file mode 100644 index 00000000..9f30cb09 Binary files /dev/null and b/public/images/101-quickstart/101-remix-interact.png differ diff --git a/public/images/enable-remix-plugin.gif b/public/images/enable-remix-plugin.gif new file mode 100644 index 00000000..0ff173e6 Binary files /dev/null and b/public/images/enable-remix-plugin.gif differ diff --git a/public/images/nuxt-debugger.png b/public/images/nuxt-debugger.png new file mode 100644 index 00000000..291f1f33 Binary files /dev/null and b/public/images/nuxt-debugger.png differ diff --git a/public/images/quickstart-paymasters/crown-mint.png b/public/images/quickstart-paymasters/crown-mint.png new file mode 100644 index 00000000..97722741 Binary files /dev/null and b/public/images/quickstart-paymasters/crown-mint.png differ diff --git a/public/images/remix-plugin-clone-repo.gif b/public/images/remix-plugin-clone-repo.gif new file mode 100644 index 00000000..a7d422ee Binary files /dev/null and b/public/images/remix-plugin-clone-repo.gif differ diff --git a/public/images/zk-stack/Check_if_satisfied-1.png b/public/images/zk-stack/Check_if_satisfied-1.png new file mode 100644 index 00000000..7a7cca20 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-1.png differ diff 
--git a/public/images/zk-stack/Check_if_satisfied-11.png b/public/images/zk-stack/Check_if_satisfied-11.png new file mode 100644 index 00000000..90f4e9c8 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-11.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-12.png b/public/images/zk-stack/Check_if_satisfied-12.png new file mode 100644 index 00000000..3eee1a4c Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-12.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-13.png b/public/images/zk-stack/Check_if_satisfied-13.png new file mode 100644 index 00000000..d1614be8 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-13.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-14.png b/public/images/zk-stack/Check_if_satisfied-14.png new file mode 100644 index 00000000..0bc9db0a Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-14.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-16.png b/public/images/zk-stack/Check_if_satisfied-16.png new file mode 100644 index 00000000..3b52f8f7 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-16.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-17.png b/public/images/zk-stack/Check_if_satisfied-17.png new file mode 100644 index 00000000..4cf5d878 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-17.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-2.png b/public/images/zk-stack/Check_if_satisfied-2.png new file mode 100644 index 00000000..e47ae614 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-2.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-3.png b/public/images/zk-stack/Check_if_satisfied-3.png new file mode 100644 index 00000000..666694d6 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-3.png differ diff --git 
a/public/images/zk-stack/Check_if_satisfied-4.png b/public/images/zk-stack/Check_if_satisfied-4.png new file mode 100644 index 00000000..9ee3ad86 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-4.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-7.png b/public/images/zk-stack/Check_if_satisfied-7.png new file mode 100644 index 00000000..9e3d335f Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-7.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-8.png b/public/images/zk-stack/Check_if_satisfied-8.png new file mode 100644 index 00000000..8c59e29a Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-8.png differ diff --git a/public/images/zk-stack/Check_if_satisfied-9.png b/public/images/zk-stack/Check_if_satisfied-9.png new file mode 100644 index 00000000..f1cbfbe9 Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied-9.png differ diff --git a/public/images/zk-stack/Check_if_satisfied.png b/public/images/zk-stack/Check_if_satisfied.png new file mode 100644 index 00000000..bf8412ae Binary files /dev/null and b/public/images/zk-stack/Check_if_satisfied.png differ diff --git a/public/images/zk-stack/arithmetic_opcode.png b/public/images/zk-stack/arithmetic_opcode.png new file mode 100644 index 00000000..5fc835de Binary files /dev/null and b/public/images/zk-stack/arithmetic_opcode.png differ diff --git a/public/images/zk-stack/base-layer-circuit-diagram.png b/public/images/zk-stack/base-layer-circuit-diagram.png new file mode 100644 index 00000000..b11c397e Binary files /dev/null and b/public/images/zk-stack/base-layer-circuit-diagram.png differ diff --git a/public/images/zk-stack/block-layout.png b/public/images/zk-stack/block-layout.png new file mode 100644 index 00000000..c93dea58 Binary files /dev/null and b/public/images/zk-stack/block-layout.png differ diff --git a/public/images/zk-stack/circuit-allocate.png b/public/images/zk-stack/circuit-allocate.png 
new file mode 100644 index 00000000..d3f07d8d Binary files /dev/null and b/public/images/zk-stack/circuit-allocate.png differ diff --git a/public/images/zk-stack/circuit-commitments.png b/public/images/zk-stack/circuit-commitments.png new file mode 100644 index 00000000..d11b7c92 Binary files /dev/null and b/public/images/zk-stack/circuit-commitments.png differ diff --git a/public/images/zk-stack/circuit-compare-addresses.png b/public/images/zk-stack/circuit-compare-addresses.png new file mode 100644 index 00000000..62cbadc5 Binary files /dev/null and b/public/images/zk-stack/circuit-compare-addresses.png differ diff --git a/public/images/zk-stack/circuit-ecrecover-precompile.png b/public/images/zk-stack/circuit-ecrecover-precompile.png new file mode 100644 index 00000000..4d548a9c Binary files /dev/null and b/public/images/zk-stack/circuit-ecrecover-precompile.png differ diff --git a/public/images/zk-stack/circuit-ecrecover.png b/public/images/zk-stack/circuit-ecrecover.png new file mode 100644 index 00000000..5daaf07f Binary files /dev/null and b/public/images/zk-stack/circuit-ecrecover.png differ diff --git a/public/images/zk-stack/circuit-finite-fields.png b/public/images/zk-stack/circuit-finite-fields.png new file mode 100644 index 00000000..8c2be892 Binary files /dev/null and b/public/images/zk-stack/circuit-finite-fields.png differ diff --git a/public/images/zk-stack/circuit-lookup.png b/public/images/zk-stack/circuit-lookup.png new file mode 100644 index 00000000..99aa7c13 Binary files /dev/null and b/public/images/zk-stack/circuit-lookup.png differ diff --git a/public/images/zk-stack/circuit-pi-diagram.png b/public/images/zk-stack/circuit-pi-diagram.png new file mode 100644 index 00000000..9b0e7d15 Binary files /dev/null and b/public/images/zk-stack/circuit-pi-diagram.png differ diff --git a/public/images/zk-stack/circuit.png b/public/images/zk-stack/circuit.png new file mode 100644 index 00000000..00fedfd4 Binary files /dev/null and 
b/public/images/zk-stack/circuit.png differ diff --git a/public/images/zk-stack/circuits-address.png b/public/images/zk-stack/circuits-address.png new file mode 100644 index 00000000..9211d0a6 Binary files /dev/null and b/public/images/zk-stack/circuits-address.png differ diff --git a/public/images/zk-stack/circuits-configure-builder.png b/public/images/zk-stack/circuits-configure-builder.png new file mode 100644 index 00000000..37c0c99a Binary files /dev/null and b/public/images/zk-stack/circuits-configure-builder.png differ diff --git a/public/images/zk-stack/circuits-ecrecover-geometry.png b/public/images/zk-stack/circuits-ecrecover-geometry.png new file mode 100644 index 00000000..030a9788 Binary files /dev/null and b/public/images/zk-stack/circuits-ecrecover-geometry.png differ diff --git a/public/images/zk-stack/compiler-toolchain.png b/public/images/zk-stack/compiler-toolchain.png new file mode 100644 index 00000000..bdc451dd Binary files /dev/null and b/public/images/zk-stack/compiler-toolchain.png differ diff --git a/public/images/zk-stack/contracts-external.png b/public/images/zk-stack/contracts-external.png new file mode 100644 index 00000000..364d41fe Binary files /dev/null and b/public/images/zk-stack/contracts-external.png differ diff --git a/public/images/zk-stack/deploy-weth.png b/public/images/zk-stack/deploy-weth.png new file mode 100644 index 00000000..a2ef1bf8 Binary files /dev/null and b/public/images/zk-stack/deploy-weth.png differ diff --git a/public/images/zk-stack/deposit-weth.png b/public/images/zk-stack/deposit-weth.png new file mode 100644 index 00000000..5e24a535 Binary files /dev/null and b/public/images/zk-stack/deposit-weth.png differ diff --git a/public/images/zk-stack/explorer-example.png b/public/images/zk-stack/explorer-example.png new file mode 100644 index 00000000..cd96434a Binary files /dev/null and b/public/images/zk-stack/explorer-example.png differ diff --git a/public/images/zk-stack/hyperbridges.png 
b/public/images/zk-stack/hyperbridges.png new file mode 100644 index 00000000..915d7769 Binary files /dev/null and b/public/images/zk-stack/hyperbridges.png differ diff --git a/public/images/zk-stack/hyperscalingAggregation.png b/public/images/zk-stack/hyperscalingAggregation.png new file mode 100644 index 00000000..3974aeb2 Binary files /dev/null and b/public/images/zk-stack/hyperscalingAggregation.png differ diff --git a/public/images/zk-stack/hyperscalingBridgingFull.png b/public/images/zk-stack/hyperscalingBridgingFull.png new file mode 100644 index 00000000..2a95a103 Binary files /dev/null and b/public/images/zk-stack/hyperscalingBridgingFull.png differ diff --git a/public/images/zk-stack/hyperscalingFastEconomic.png b/public/images/zk-stack/hyperscalingFastEconomic.png new file mode 100644 index 00000000..9adba046 Binary files /dev/null and b/public/images/zk-stack/hyperscalingFastEconomic.png differ diff --git a/public/images/zk-stack/hyperscalingL3Fast2Blocks.png b/public/images/zk-stack/hyperscalingL3Fast2Blocks.png new file mode 100644 index 00000000..0ba69f51 Binary files /dev/null and b/public/images/zk-stack/hyperscalingL3Fast2Blocks.png differ diff --git a/public/images/zk-stack/hyperscalingLayeredAggregation.png b/public/images/zk-stack/hyperscalingLayeredAggregation.png new file mode 100644 index 00000000..045a2a0a Binary files /dev/null and b/public/images/zk-stack/hyperscalingLayeredAggregation.png differ diff --git a/public/images/zk-stack/hyperscalingSovereignty.png b/public/images/zk-stack/hyperscalingSovereignty.png new file mode 100644 index 00000000..080bd209 Binary files /dev/null and b/public/images/zk-stack/hyperscalingSovereignty.png differ diff --git a/public/images/zk-stack/hyperscalingUniswap.png b/public/images/zk-stack/hyperscalingUniswap.png new file mode 100644 index 00000000..6c0d0d9f Binary files /dev/null and b/public/images/zk-stack/hyperscalingUniswap.png differ diff --git a/public/images/zk-stack/hyperscalingZKPorter.png 
b/public/images/zk-stack/hyperscalingZKPorter.png new file mode 100644 index 00000000..18a38656 Binary files /dev/null and b/public/images/zk-stack/hyperscalingZKPorter.png differ diff --git a/public/images/zk-stack/l2-components copy.png b/public/images/zk-stack/l2-components copy.png new file mode 100644 index 00000000..b7c062e9 Binary files /dev/null and b/public/images/zk-stack/l2-components copy.png differ diff --git a/public/images/zk-stack/l2-components.png b/public/images/zk-stack/l2-components.png new file mode 100644 index 00000000..b7c062e9 Binary files /dev/null and b/public/images/zk-stack/l2-components.png differ diff --git a/public/images/zk-stack/l2-state-running.png b/public/images/zk-stack/l2-state-running.png new file mode 100644 index 00000000..7fc75c29 Binary files /dev/null and b/public/images/zk-stack/l2-state-running.png differ diff --git a/public/images/zk-stack/l2-state-start.png b/public/images/zk-stack/l2-state-start.png new file mode 100644 index 00000000..8f3da928 Binary files /dev/null and b/public/images/zk-stack/l2-state-start.png differ diff --git a/public/images/zk-stack/vm-mapping.png b/public/images/zk-stack/vm-mapping.png new file mode 100644 index 00000000..e1d0de06 Binary files /dev/null and b/public/images/zk-stack/vm-mapping.png differ diff --git a/public/particles.json b/public/particles.json new file mode 100644 index 00000000..87f5d23d --- /dev/null +++ b/public/particles.json @@ -0,0 +1,67 @@ +{ + "fpsLimit": 120, + "interactivity": { + "events": { + "onClick": { + "enable": true, + "mode": "push" + }, + "onHover": { + "enable": true, + "mode": "repulse" + } + }, + "modes": { + "bubble": { + "distance": 400, + "duration": 2, + "opacity": 0.8, + "size": 40 + }, + "push": { + "quantity": 4 + }, + "repulse": { + "distance": 50, + "duration": 0.4 + } + } + }, + "particles": { + "color": { + "value": "#8b8ccf" + }, + "links": { + "color": "#8b8ccf", + "distance": 150, + "enable": true, + "opacity": 0.5, + "width": 1 + }, + 
"move": { + "direction": "none", + "enable": true, + "outModes": "bounce", + "random": false, + "speed": 0.5, + "straight": false + }, + "number": { + "density": { + "enable": true + }, + "value": 80 + }, + "opacity": { + "value": 0.5 + }, + "shape": { + "type": "circle" + }, + "size": { + "value": { "min": 1, "max": 5 } + } + }, + "detectRetina": true +} + diff --git a/public/social-card.png b/public/social-card.png new file mode 100644 index 00000000..733c0faf Binary files /dev/null and b/public/social-card.png differ diff --git a/server/api/search.json.get.ts b/server/api/search.json.get.ts new file mode 100644 index 00000000..e075d921 --- /dev/null +++ b/server/api/search.json.get.ts @@ -0,0 +1,7 @@ +import { serverQueryContent } from '#content/server'; + +export default eventHandler(async (event) => { + return serverQueryContent(event) + .where({ _type: 'markdown', navigation: { $ne: false } }) + .find(); +}); diff --git a/server/tsconfig.json b/server/tsconfig.json new file mode 100644 index 00000000..b9ed69c1 --- /dev/null +++ b/server/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "../.nuxt/tsconfig.server.json" +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..c9a4b1e3 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "./.nuxt/tsconfig.json", + "compilerOptions": { + "composite": true, + "strict": true, + "downlevelIteration": true, + "forceConsistentCasingInFileNames": true, + "allowJs": true, + "typeRoots": ["./node_modules/@types", "./types", ".nuxt/nuxt.d.ts"] + } +}