diff --git a/.github/ISSUE_TEMPLATE/documentation_bug_report.md b/.github/ISSUE_TEMPLATE/documentation_bug_report.md deleted file mode 100644 index f1c79e2b9a08f..0000000000000 --- a/.github/ISSUE_TEMPLATE/documentation_bug_report.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -name: "\U0001F41B Documentation/aptos.dev Bug report" -about: Create a bug report to help improve the Aptos Developers' Website -title: "[Docs]" -labels: ["documentation"] -assignees: 'clay-aptos' - ---- - -# Aptos Documentation Issue - - - -## Location - - - - - -## Description - - - - - - - -## Audience - - - - - -## Additional context - - - - - - diff --git a/.github/actions/file-change-determinator/action.yaml b/.github/actions/file-change-determinator/action.yaml index a8bd8f83ef25c..c2501baa9f34c 100644 --- a/.github/actions/file-change-determinator/action.yaml +++ b/.github/actions/file-change-determinator/action.yaml @@ -14,4 +14,4 @@ runs: uses: fkirc/skip-duplicate-actions@v5 with: skip_after_successful_duplicate: false # Don't skip if the action is a duplicate (this may cause false positives) - paths_ignore: '["**/*.md", "developer-docs-site/**"]' + paths_ignore: '["**/*.md"]' diff --git a/.github/workflows/lint-test.yaml b/.github/workflows/lint-test.yaml index c8108e3115a1e..5f06e6884c713 100644 --- a/.github/workflows/lint-test.yaml +++ b/.github/workflows/lint-test.yaml @@ -51,21 +51,6 @@ jobs: - run: echo "Skipping general lints! Unrelated changes detected." if: needs.file_change_determinator.outputs.only_docs_changed == 'true' - # Run the docs linter. This is a PR required job. 
- docs-lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 - with: - node-version-file: .node-version - - uses: pnpm/action-setup@v2 - - run: pnpm lint - working-directory: developer-docs-site - - run: sudo apt update -y && sudo apt install -y aspell aspell-en - - run: pnpm spellcheck - working-directory: developer-docs-site - # Run the crypto hasher domain separation checks rust-cryptohasher-domain-separation-check: needs: file_change_determinator diff --git a/developer-docs-site/.gitattributes b/developer-docs-site/.gitattributes deleted file mode 100644 index d12d8a0491d22..0000000000000 --- a/developer-docs-site/.gitattributes +++ /dev/null @@ -1 +0,0 @@ -*.md -whitespace diff --git a/developer-docs-site/.gitignore b/developer-docs-site/.gitignore deleted file mode 100644 index b021b9473bc90..0000000000000 --- a/developer-docs-site/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -# OSX -*.DS_Store -venv - -# Installation -package-lock.json - -# Docusaurus -build/ -i18n/ -node_modules/ - -# Generated files -.docusaurus/ -.cache-loader -.vercel -.idea/ - -# ignore autogenerated docs -static/docs/rustdocs/ diff --git a/developer-docs-site/.prettierignore b/developer-docs-site/.prettierignore deleted file mode 100644 index 0350ddb57ede6..0000000000000 --- a/developer-docs-site/.prettierignore +++ /dev/null @@ -1,13 +0,0 @@ -# Symlinked SDKs -static/sdks - -# Docusaurus -build/ -i18n/ -node_modules/ - -# Generated files -.docusaurus/ - -# ignore autogenerated docs -static/docs/rustdocs/ diff --git a/developer-docs-site/README.md b/developer-docs-site/README.md index 3dc4e6a505c16..2d2afc9cd6bed 100644 --- a/developer-docs-site/README.md +++ b/developer-docs-site/README.md @@ -1,113 +1,3 @@ # Developer Documentation - - [Installation](#installation) - - [Requirements](#requirements) - - [Fork and clone the Aptos repo](#fork-and-clone-the-aptos-repo) - - [Build and serve the docs locally](#build-and-serve-the-docs-locally) - 
- [Build static html files](#build-static-html-files) - - [Debug/Format files](#debugging) - -This Aptos Developer Documentation is built using [Docusaurus 2](https://docusaurus.io/) and displayed on https://aptos.dev/. Follow the below steps to build the docs locally and test your contribution. - -We now use [lychee-broken-link-checker](https://github.com/marketplace/actions/lychee-broken-link-checker) to check for broken links in the GitHub Markdown. We are a corresponding link checker for pages on Aptos.dev. - -With results visible at: -https://github.com//aptos-labs/aptos-core/actions/workflows/links.yml - - -## Installation - -**IMPORTANT**: These installation steps apply to macOS environment. - -### Requirements - -Before you proceed, make sure you install the following tools. - -- Install [Node.js](https://nodejs.org/en/download/) by executing the below command on your Terminal: - -``` -brew install node -``` - -- Install the latest [pnpm](https://pnpm.io/installation) by executing the below command on your Terminal: - -``` -curl -fsSL https://get.pnpm.io/install.sh | sh - -``` - -## Clone the Aptos repo - - ``` - git clone https://github.com/aptos-labs/aptos-core.git - - ``` - -## Build and serve the docs locally - -1. `cd` into the `developer-docs-site` directory in your clone. - - ``` - cd aptos-core/developer-docs-site - ``` -2. Run `pnpm`. - - ``` - pnpm install - ``` -This step will configure the Docusaurus static site generator. - -3. Start the server locally. This will also open the locally built docs in your default browser. - -> **NOTE**: This step will not generate static html files, but will render the docs dynamically. - - ``` - pnpm start - ``` - - 4. See your changes staged at: http://localhost:3000/ - - 5. Create a pull request with your changes as described in our [Contributing](https://github.com/aptos-labs/aptos-core/blob/main/CONTRIBUTING.md) README. 
- -## (Optional) Build static html files - -Execute the below steps if you want to generate static html documentation files. A `build` directory will be created with the static html files and assets contained in it. - -1. Make sure you install dependencies. - - ``` - pnpm install - ``` -2. Build static html files with pnpm. - - ``` - pnpm build - ``` - -This command generates static html content and places it in the `build` directory. - -3. Finally, use the below command to start the documentation server on your localhost. - - ``` - pnpm run serve - ``` - -## Debugging - -Fix formatting issues by running: - -``` -pnpm fmt -``` - -## Regenerating contributors -The src/contributors.json file (which powers the list of Authors at the bottom of doc pages) needs to be manually generated. - -In order to generate the contributor map you must authenticate with GitHub. The best way to do that is using GitHub CLI ([installation guide(https://github.com/cli/cli#installation)]). Once you have the GitHub CLI installed, you can run the following command to authenticate: -``` -gh auth login --scopes read:user,user:email -``` - -Once that is done, you can generate the map with this command: -``` -pnpm contributors -``` +This has been moved to https://github.com/aptos-labs/developer-docs \ No newline at end of file diff --git a/developer-docs-site/babel.config.js b/developer-docs-site/babel.config.js deleted file mode 100644 index bfd75dbdfc72a..0000000000000 --- a/developer-docs-site/babel.config.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - presets: [require.resolve("@docusaurus/core/lib/babel/preset")], -}; diff --git a/developer-docs-site/docs/CODEOWNERS b/developer-docs-site/docs/CODEOWNERS deleted file mode 100644 index 3899b59e62cb8..0000000000000 --- a/developer-docs-site/docs/CODEOWNERS +++ /dev/null @@ -1,9 +0,0 @@ -# This is the overarching CODEOWNERS file for Aptos.dev documentation. -# It exists to help route review requests and ensure proper review of changes. 
-# We include each subdirectory and relevant owners below: -# Global rule: -* @davidiw @gregnazario @movekevin -## Aptos White Paper -/aptos-white-paper/** @aching @ShaikhAliMo -## Nodes -/nodes/** @rustielin @aptos-labs/prod-eng diff --git a/developer-docs-site/docs/apis/aptos-labs-developer-portal.md b/developer-docs-site/docs/apis/aptos-labs-developer-portal.md deleted file mode 100644 index 5fe8da5ad1204..0000000000000 --- a/developer-docs-site/docs/apis/aptos-labs-developer-portal.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -title: "Aptos Labs Developer Portal" ---- - -import BetaNotice from '../../src/components/_dev_portal_beta_notice.mdx'; - - - -The [Aptos Labs Developer Portal](https://developers.aptoslabs.com) is a your gateway to access Aptos Labs provided APIs in a quick and easy fashion to power your dApp. -It consists of a Portal (UI) and a set of API Gateways operated by Aptos Labs. - -The Developer Portal aims to make it easier to build dApps by: - -1. Providing [unified domain names/URLs](../nodes/networks.md) for each API. -2. Giving you personalized examples on how to use each API. -3. Observability into your personal usage, error rates and latency of APIs. -4. Rate limiting by API developer account/app instead of origin IP. -5. (Coming Soon) Customizable Rate limits for high traffic apps. - -In order to create an Aptos Labs developer account simply go to https://developers.aptoslabs.com/ and follow the instructions. - -### Default Rate Limits for Developer Portal accounts - -Currently the following rate limits apply: - -1. GRPC Transaction Stream: 20 concurrent streams per user -2. Fullnode API: 5000 requests per 5 minutes sliding window. -3. GraphQL API: 5000 requests per 5 minutes sliding window. - - Note that requests for the Fullnode API / GraphQL API are counted separately, so you can make 5000 Fullnode API requests AND 5000 GraphQL API requests in the same 5 minutes window. The rate limit is applied as a continuous sliding window. 
- -Rate limits are customizable per user upon request. If you have a use-case that requires higher rate limits than the default, please open a support case through one of the supported channels in the portal. - -### Known Limitations - -1. Only authenticated access supported. - - At the moment the new URLs introduced by the Developer Portal / API Gateway only support requests with an API Key (Bearer authentication). - Effectively this means you can only use the new API gateway provided URLs from backend apps that can securely hold credentials. - We plan to add soon support for anonymous authentication in combination with more sophisticated rate limit protections, which then makes then these new URLs usable in end-user / client-side only apps like Browser Wallets etc. diff --git a/developer-docs-site/docs/apis/fullnode-rest-api.md b/developer-docs-site/docs/apis/fullnode-rest-api.md deleted file mode 100644 index 4d6f00e3472e8..0000000000000 --- a/developer-docs-site/docs/apis/fullnode-rest-api.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: "Fullnode Rest API" -slug: "fullnode-rest-api" ---- - -# Use the Aptos Fullnode REST API - -If you with to employ the [Aptos API](https://aptos.dev/nodes/aptos-api-spec/#/), then this guide is for you. This guide will walk you through all you need to integrate the Aptos blockchain into your platform with the Aptos API. - -:::tip -Also see the [System Integrators Guide](../guides/system-integrators-guide.md) for a thorough walkthrough of Aptos integration. -::: - -## Understanding rate limits - -As with the [Aptos Indexer](../indexer/api/labs-hosted.md#rate-limits), the Aptos REST API has a rate limit of 5000 requests per five minutes by IP address, whether submitting transactions or querying the API on Aptos-provided nodes. (As a node operator, you may raise those limits on your own node.) Note that this limit can change with or without prior notice. 
- -## Viewing current and historical state - -Most integrations into the Aptos blockchain benefit from a holistic and comprehensive overview of the current and historical state of the blockchain. Aptos provides historical transactions, state, and events, all the result of transaction execution. - -* Historical transactions specify the execution status, output, and tie to related events. Each transaction has a unique version number associated with it that dictates its global sequential ordering in the history of the blockchain ledger. -* The state is the representation of all transaction outputs up to a specific version. In other words, a state version is the accumulation of all transactions inclusive of that transaction version. -* As transactions execute, they may emit events. [Events](../concepts/events.md) are hints about changes in on-chain data. - -:::important -Ensure the [fullnode](../nodes/networks.md) you are communicating with is up to date. The fullnode must reach the version containing your transaction to retrieve relevant data from it. There can be latency from the fullnodes retrieving state from [validator fullnodes](../concepts/fullnodes.md), which in turn rely upon [validator nodes](../concepts/validator-nodes.md) as the source of truth. -::: - -The storage service on a node employs two forms of pruning that erase data from nodes: - -* state -* events, transactions, and everything else - -While either of these may be disabled, storing the state versions is not particularly sustainable. - -Events and transactions pruning can be disabled via setting the [`enable_ledger_pruner`](https://github.com/aptos-labs/aptos-core/blob/cf0bc2e4031a843cdc0c04e70b3f7cd92666afcf/config/src/config/storage_config.rs#L141) to `false` in `storage_config.rs`. This is default behavior in Mainnet. In the near future, Aptos will provide indexers that mitigate the need to directly query from a node. 
- -The REST API offers querying transactions and events in these ways: - -* [Transactions for an account](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_account_transactions) -* [Transactions by version](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_transaction_by_version) -* [Events by event handle](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_events_by_event_handle) - -## Reading state with the View function - -View functions do not modify blockchain state when called from the API. A [View](https://github.com/aptos-labs/aptos-core/blob/main/api/src/view_function.rs) function and its [input](https://github.com/aptos-labs/aptos-core/blob/main/api/types/src/view.rs) can be used to read potentially complex on-chain state using Move. For example, you can evaluate who has the highest bid in an auction contract. Here are related files: - -* [`view_function.rs`](https://github.com/aptos-labs/aptos-core/blob/main/api/src/tests/view_function.rs) for an example -* related [Move](https://github.com/aptos-labs/aptos-core/blob/90c33dc7a18662839cd50f3b70baece0e2dbfc71/aptos-move/framework/aptos-framework/sources/coin.move#L226) code -* [specification](https://github.com/aptos-labs/aptos-core/blob/90c33dc7a18662839cd50f3b70baece0e2dbfc71/api/doc/spec.yaml#L8513). - -The view function operates like the [Aptos Simulation API](../guides/system-integrators-guide.md#testing-transactions-or-transaction-pre-execution), though with no side effects and an accessible output path. View functions can be called via the `/view` endpoint. Calls to view functions require the module and function names along with input type parameters and values. - -A function does not have to be immutable to be tagged as `#[view]`, but if the function is mutable it will not result in state mutation when called from the API. -If you want to tag a mutable function as `#[view]`, consider making it private so that it cannot be maliciously called during runtime. 
- -In order to use the View functions, you need to [publish the module](../move/move-on-aptos/cli.md#publishing-a-move-package-with-a-named-address) through the [Aptos CLI](../tools/aptos-cli/install-cli/index.md). - -In the Aptos CLI, a view function request would look like this: -``` -aptos move view --function-id devnet::message::get_message --profile devnet --args address:devnet -{ - "Result": [ - "View functions rock!" - ] -} -``` - -In the TypeScript SDK, a view function request would look like this: -``` - const payload: Gen.ViewRequest = { - function: "0x1::coin::balance", - type_arguments: ["0x1::aptos_coin::AptosCoin"], - arguments: [alice.address().hex()], - }; - - const balance = await client.view(payload); - - expect(balance[0]).toBe("100000000"); -``` - -The view function returns a list of values as a vector. By default, the results are returned in JSON format; however, they can be optionally returned in Binary Canonical Serialization (BCS) encoded format. diff --git a/developer-docs-site/docs/apis/index.md b/developer-docs-site/docs/apis/index.md deleted file mode 100644 index 9dc1353c8cfdc..0000000000000 --- a/developer-docs-site/docs/apis/index.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: "Aptos APIs" ---- - -The Aptos Blockchain network can be accessed by several APIs, depending on your use-case. - -1. #### [Aptos Fullnode-embedded REST API](./fullnode-rest-api.md). - - This API - embedded into Fullnodes - provides a simple, low latency, yet low-level way of _reading_ state and _submitting_ transactions to the Aptos Blockchain. It also supports transaction simulation. - -2. #### [Aptos Indexer-powered GraphQL API](../indexer/indexer-landing.md). - - This API provides a high-level, opinionated GraphQL API to _read_ state from the Aptos Blockchain. If your app needs to interact with high level constructs, such as NFTs, Aptos Objects or custom Move contracts, you likely want to incorporate the Aptos GraphQL Indexer API in some fashion. 
Learn more about the Indexer-powered GraphQL API here. - -3. #### [Aptos GRPC Transaction Stream API](../indexer/txn-stream/index.md) - - This API provides a way to stream historical and current transaction data in real-time to an indexing processor. This API is used by the Aptos Core Indexing infrastructure itself but also can be used to build app-specific custom indexing processors that process blockchain data in real-time. Learn more here. - -4. #### Faucet API (Only Testnet/Devnet) - - This API provides the ability to mint coins on the Aptos Labs operated devnet and testnet and it's primary purpose is development and testing of your apps and Move contracts before deploying them to mainnet. - - -The code of each of the above mentioned APIs is open-sourced on [GitHub](https://github.com/aptos-labs/aptos-core). As such anyone can operate these APIs and many independent operators and builders worldwide choose to do so. - - -### Aptos Labs operated API Deployments - -[Aptos Labs](https://aptoslabs.com) operates a deployment of these APIs on behalf of [Aptos Foundation](https://aptosfoundation.org/) for each [Aptos Network](../nodes/networks.md) and makes them available for public consumption. - -At the moment there are 2 sets of Aptos Labs API deployments: - -1. [APIs with anonymous access and IP-based rate-limiting](../nodes/networks.md) -2. [[Beta] APIs with authentication and developer-account based rate limiting through the Aptos Labs Developer Portal](./aptos-labs-developer-portal.md) diff --git a/developer-docs-site/docs/aptos-white-paper/in-korean.md b/developer-docs-site/docs/aptos-white-paper/in-korean.md deleted file mode 100644 index a43c8cbdf6689..0000000000000 --- a/developer-docs-site/docs/aptos-white-paper/in-korean.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: "한글 (In Korean)" -slug: "aptos-white-paper-in-korean" ---- - -# Aptos 블록체인 -## 안전성, 확장성, 향상성 있는 웹3 인프라 - -초록 - -블록체인이 새로운 인터넷 인프라로 부상하면서 수만개의 탈중앙 애플리케이션이 배포되어 왔다. 
-불행히도, 블록체인은 잦은 중단, 높은 비용, 낮은 처리량 제한 및 수 많은 보안 문제들로 인해 -사용이 아직 보편화되지 않았다. 블록체인 인프라는 웹3 시대의 대중화를 위해서 신뢰할 수 있고 -확장 가능하고, 비용 효율적이며, 널리 사용되는 애플리케이션의 구축을 위해 지속적으로 발전하는 -플랫폼인 클라우드 인프라의 길을 따라야 한다. - -우리는 이러한 과제를 해결하기 위해 확장성, 안전성, 신뢰성 및 향상성을 핵심 원칙으로 설계된 -Aptos 블록체인을 제시한다. 지난 3년간 전 세계 350명 이상의 개발자들이 Aptos 블록체인을 -개발했다 [1]. Aptos는 합의, 스마트 컨트랙트 설계, 시스템 보안, 성능 및 탈중앙성 면에서 새롭고 -참신한 혁신을 제안한다. 이러한 기술들의 조합은 웹3 대중화를 위한 기본 구성 요소가 될 것이다: - -* 먼저, Aptos 블록체인은 빠르고 안전한 트랜잭션 실행을 위해 Move 언어를 자체적으로 통합하고 -내부적으로 사용한다 [2]. Move Prover는 Move 언어의 정형검증기로써 스마트 컨트랙트의 -불변속성 및 행위에 대한 추가적인 안전을 제공한다. 보안에 대한 이러한 집중을 통해 -개발자는 악성 개체로부터 소프트웨어를 더 잘 보호할 수 있다. -* 둘째, Aptos 데이터 모델은 유연한 키 관리 및 하이브리드 수탁 옵션을 지원한다. 이는 서명 -전 트랜잭션 투명성과 함께 실제 라이트 클라이언트 프로토콜과 함께 보다 안전하고 신뢰할수 -있는 사용자 경험을 제공한다. -* 셋째, 높은 처리량과 낮은 지연 시간을 달성하기 위해 Aptos 블록체인은 트랜잭션 처리의 주요 -단계에 모듈화된 파이프라인 방식을 사용한다. 구체적으로는 트랜잭션 전파, 블록 메타데이터 -정렬, 병렬 트랜잭션 실행, 배치(batch) 스토리지 및 원장 인증 등이 동시에 운영된다. -이 접근 방식은 사용 가능한 모든 물리적 자원을 완벽하게 활용하고, 하드웨어 효율성을 향상 -시키며, 매우 병렬적인 실행을 가능하게 한다. -* 넷째, 데이터에 대한 사전 지식을 읽고 쓰도록 요구함으로써 트랜잭션 원자성을 파괴하는 다른 -병렬 실행 엔진과 달리 Aptos 블록체인은 개발자에게 그러한 제한을 두지 않는다. 임의로 -복잡한 트랜잭션이 원자성을 효율적으로 지원하여 실제 애플리케이션의 처리량을 높이고 대기 -시간을 단축할 수 있으며 개발을 단순화할 수 있다. -* 다섯째, Aptos의 모듈형으로 설계된 아키텍처는 클라이언트 유연성을 지원하고 빈번하고 즉각적인 -업그레이드를 위해 최적화되었다. 또한 Aptos 블록체인은 내장된 온체인 변경 관리 -프로토콜을 제공하여, 혁신적인 새로운 기술을 신속하게 배포하고 새로운 웹3 사용 사례를 지원할 수 있다. - - -## 전체 PDF 버전 - -:::tip 전체 PDF 버전 - -- **초록**: Aptos 백서의 한국어 버전 전체 PDF를 보려면 [여기를 클릭하십시오](/papers/whitepaper-korean.pdf). -- **English**: Get the [full PDF of the Aptos White Paper](/papers/Aptos-Whitepaper.pdf). 
-::: diff --git a/developer-docs-site/docs/aptos-white-paper/index.md b/developer-docs-site/docs/aptos-white-paper/index.md deleted file mode 100644 index 28ca53dafd786..0000000000000 --- a/developer-docs-site/docs/aptos-white-paper/index.md +++ /dev/null @@ -1,55 +0,0 @@ ---- -title: "Aptos White Paper" ---- - -# The Aptos Blockchain -## A Safe, Scalable, and Upgradeable Web3 Infrastructure - -Abstract - -The rise of blockchains as a new Internet infrastructure has led to developers deploying tens of -thousands of decentralized applications at rapidly growing rates. Unfortunately, blockchain usage -is not yet ubiquitous due to frequent outages, high costs, low throughput limits, and numerous -security concerns. To enable mass adoption in the web3 era, blockchain infrastructure needs -to follow the path of cloud infrastructure as a trusted, scalable, cost-efficient, and continually -improving platform for building widely-used applications. - -We present the Aptos blockchain, designed with scalability, safety, reliability, and upgradeability -as key principles, to address these challenges. The Aptos blockchain has been developed over the -past three years by over 350+ developers across the globe. It offers new and novel innovations -in consensus, smart contract design, system security, performance, and decentralization. The -combination of these technologies will provide a fundamental building block to bring web3 to the -masses: - -- First, the Aptos blockchain natively integrates and internally uses the Move language for fast -and secure transaction execution. The Move prover, a formal verifier for smart contracts -written in the Move language, provides additional safeguards for contract invariants and -behavior. This focus on security allows developers to better protect their software from -malicious entities. -- Second, the Aptos data model enables flexible key management and hybrid custodial options. 
-This, alongside transaction transparency prior to signing and practical light client protocols, -provides a safer and more trustworthy user experience. -- Third, to achieve high throughput and low latency, the Aptos blockchain leverages a pipelined -and modular approach for the key stages of transaction processing. Specifically, transaction -dissemination, block metadata ordering, parallel transaction execution, batch storage, and -ledger certification all operate concurrently. This approach fully leverages all available physical resources, improves hardware efficiency, and enables highly parallel execution. -- Fourth, unlike other parallel execution engines that break transaction atomicity by requiring -upfront knowledge of the data to be read and written, the Aptos blockchain does not put -such limitations on developers. It can efficiently support atomicity with arbitrarily complex -transactions, enabling higher throughput and lower latency for real-world applications and -simplifying development. -- Fifth, the Aptos modular architecture design supports client flexibility and optimizes for -frequent and instant upgrades. Moreover, to rapidly deploy new technology innovations -and support new web3 use cases, the Aptos blockchain provides embedded on-chain change -management protocols. -- Finally, the Aptos blockchain is experimenting with future initiatives to scale beyond individual validator performance: its modular design and parallel execution engine support internal -sharding of a validator and homogeneous state sharding provides the potential for horizontal -throughput scalability without adding additional complexity for node operators. - -## Full PDF versions - -:::tip Full PDF versions - -- **English**: Get the [full PDF of the Aptos White Paper](/papers/Aptos-Whitepaper.pdf). -- **Korean**: Get the [Korean version full PDF of the Aptos White Paper](/papers/whitepaper-korean.pdf). 
-::: diff --git a/developer-docs-site/docs/community/aptos-style.md b/developer-docs-site/docs/community/aptos-style.md deleted file mode 100644 index 9dc129f1117ee..0000000000000 --- a/developer-docs-site/docs/community/aptos-style.md +++ /dev/null @@ -1,229 +0,0 @@ ---- -title: "Follow Aptos Style" -slug: "aptos-style" ---- - -# Follow Aptos Writing Style - -When making [site updates](./site-updates.md), Aptos recommends adhering to this writing and formatting style guide for consistency with the rest of Aptos.dev, as well as accessibility directly in GitHub.com and source code editors. - -## Hold contributions to high standards - -All doc updates should be thorough and tested. This includes external contributions from the community. - -So when reviewing changes, do not merge them in unless all feedback has been addressed. - -## Single source in Markdown - -There should be one external upstream source of truth for Aptos development. And we aim for that to be Aptos.dev. Edit away in [Markdown](https://www.markdownguide.org/basic-syntax/) format using our instructions for making [site updates](./site-updates.md). - -Note, you can easily convert Google Docs to Markdown format using the [Docs to Markdown](https://workspace.google.com/marketplace/app/docs_to_markdown/700168918607) add-on. - -## Link from product to docs - -Whether you work on an external product or an internal tool, your work likely has an interface. From it, you should link to your user docs, along with bug queues and contact information. - -## Peer review docs - -Your users should not be the first people to use your documentation. Have your peers review your docs just as they review your code. Walk through the flow. If they cannot, your users can't either. - -## Form links properly - -When linking to absolute files (code, reference) not on Aptos.dev, always use the fully qualified domain. Else, use relative links. Always include the file extension (`.md` for Markdown). 
- -Correct: - -```markdown -[How Base Gas Works](../../../../concepts/base-gas.md) -``` - -Incorrect: - -```markdown -[How Base Gas Works](/concepts/base-gas) -``` - -The second example will work in [Aptos.dev](http://Aptos.dev) but not when navigating the docs via [GitHub.com](http://GitHub.com) or in source viewer/editor. For links to files in the same directory, include the leading `./` like so: - -```markdown -[proofs](./txns-states.md#proofs) -``` - -## Use permanent links to code - -When linking to code files in GitHub, use a [permanent link](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/creating-a-permanent-link-to-a-code-snippet) to the relative line or set of lines. - -## Link check your pages - -It never hurts to run a link check against your pages or entire site. Here are some freely available and useful tools for **public** site checking: - - * https://validator.w3.org/checklink - * https://www.drlinkcheck.com/ - -Set recursion depth accordingly to delve into sub-links. - -## Add images to `static` directory - -Place all images in the [`developer-docs-site/static/img`](https://github.com/aptos-labs/aptos-core/tree/main/developer-docs-site/static/img) directory and use relative links to include them. See the image paths in [Set up a React app](../tutorials/build-e2e-dapp/2-set-up-react-app.md) for examples. - -## Redirect moved pages - -Avoid losing users by adding redirects for moved and renamed [Aptos.dev](http://Aptos.dev) pages in: -https://github.com/aptos-labs/aptos-core/blob/main/developer-docs-site/docusaurus.config.js - -## Name files succinctly - -Use short, detailed names with no spaces: -* hyphenate rather than underscore - -* be descriptive -* use noun (topic) first, with verb optional: ex. 
accounts.md, site-updates.md - -## Use active tense - -Avoid passive tense and gerunds when possible: - -- Good - Use Aptos API -- Not-so-good - Using Aptos API -- Needs improvement - Aptos API Use - -## Employ direct style and tone - -- Address the user directly. Use "you" instead of "user" or "they". -- Avoid writing the way you speak, i.e., avoid using contractions, jokes or using colloquial content. - - 💡 **Example**: - - - **Preferred**: “it will” or “we will” or “it would”. - - **Avoid**: “it’ll” or “we’ll” or “it’d”. - -- Use the active voice. - - 💡 **Example**: - - - **Preferred**: Fork and clone the Aptos repo. - - **Avoid**: The Aptos repo should be cloned. - - **Preferred**: Copy the `Config path` information from the terminal. - - **Avoid**: The `Config path` information should be copied from the terminal. - -- Avoid hypothetical future "would". Instead, write in present tense. - - 💡 **Example**: - - - **Preferred**: "The compiler sends". - - **Avoid**: “The compiler would then send”. - -## Ensure readability - -- Break up walls of text (long passages of text) into smaller chunks for easier reading. -- Use lists. When you use lists, keep each item as distinct as you can from another item. -- Provide context. Your readers can be beginner developers or experts in specialized fields. They may not know what you are talking about without any context. -- Use shorter sentences (26 words or less) They are easier to understand (and translate). -- Define acronyms and abbreviations at the first usage in every topic. -- Keep in mind our documentation is written in US English, but the audience will include people for whom English is not their primary language. -- Avoid culturally specific references, humor, names. -- Write dates and times in unambiguous and clear ways using the [international standard](https://en.wikipedia.org/wiki/Date_format_by_country). Write "27 November 2020" instead of either "11/27/2020" or "27/11/2020" or "November 27, 2020". 
-- Avoid negative sentence construction. - - 💡 **Example**: - - - **Preferred**: It is common. - - **Avoid**: It is not uncommon. - - Yes there is a subtle difference between the two, but for technical writing this simplification works better. - -- Avoid directional language (below, left) in procedural documentation, **unless** you are pointing to an item that is immediately next to it. -- Be consistent in capitalization and punctuation. -- Avoid the `&` character in the descriptive text. Use the English word "and". - -## Avoid foreshadowing - -- Do not refer to future features or products. -- Avoid making excessive or unsupported claims about future enhancements. - -## Use proper casing - -Use title case for page titles and sentence case for section headers. Ex: - -- Page title - Integrate Aptos with Your Platform -- Section title - Choose a network - -Of course, capitalize [proper nouns](https://www.scribbr.com/nouns-and-pronouns/proper-nouns/), such as “Aptos” in “Accounts on Aptos” - -## Write clear titles and headings - -- Document titles and section headings should: - - Explicitly state the purpose of the section. - - Be a call to action, or intention. - -This approach makes it easier for the reader to get her specific development task done. - -💡 **Examples** - -- **Preferred**: Running a fullnode (section heading) -- **Avoid**: FullNode running fundamentals (title is not purpose-driven) -- **Preferred**: Creating your first Move module -- **Avoid**: Move module - -**Document titles (h1)** - -- Use title case. For example: "Running a Model" - -A document title is the main title of a document page. A document has only one document title. - -💡 **Example**: "Writing Style Guide" at the beginning of this page. The document title also appears at the top level in the navigation bar, so it must be short, preferably four to five words or less. - - -**Section headings within a document (h2, h3, h4, h5)** - -- Use sentence case. 
**For example**: "Verify initial synchronization" - -A section heading is the title for an individual section within a document page. - -💡 **Example**: "Titles and headings" at the top of this section. A document page can have multiple sections, and hence multiple section headings. - -- Use a heading hierarchy. Do not skip levels of the heading hierarchy. **For example**, put h3 only under h2. -- To change the visual formatting of a heading, use CSS instead of using a heading level that does not fit the hierarchy. -- Do not keep blank headings or headings with no associated content. -- Avoid using question mark in document titles and section headings. - - 💡 **Example**: - - - **Preferred**: How it works - - **Avoid**: How it works? - -- Avoid using emphasis or italics in document titles or section headings. -- Avoid joining words using a slash. - - 💡 **Example**: - - - **Preferred**: Execute on your macOS or Linux system - - **Avoid**: Execute on your macOS/Linux system - -## Avoid duplication - -We face too many challenges to tackle the same one from scratch again or split our efforts into silos. We must collaborate to make best use of our diverse and growing skillset. - -Search and navigate across this site to see if an existing document already serves your purpose and garners an update before starting anew. As with code, [don't repeat yourself](https://www.wikipedia.org/wiki/Don%27t_repeat_yourself). - -## Use these Aptos words and phrases consistently - -The below table lists the correct usage of Aptos words and phrases. - -| Recommended way to use in mid-sentence | Avoid these forms | -| --- | --- | -| First letter uppercase if appearing at the start of a sentence. 
| | -| fullnode (FN) | FullNode, Fullnode | -| validator or validator node (VN) | Validator Node, ValidatorNode | -| validator fullnode (VFN) | Validator FullNode or ValidatorFullNode | -| public fullnode | Public FullNode | -| Aptos blockchain | Aptos Blockchain | -| Move module | Move Module | -| Move resource | Move Resource | -| Aptos framework | Aptos Framework | -| Faucet | faucet | -| mempool | Mempool | -| bytecode | bytecodes | -| MoveVM | Move VM | -| REST service | REST Service | -| upgradeable | upgradable | diff --git a/developer-docs-site/docs/community/contributions/remix-ide-plugin.md b/developer-docs-site/docs/community/contributions/remix-ide-plugin.md deleted file mode 100644 index 64d8a89c9a950..0000000000000 --- a/developer-docs-site/docs/community/contributions/remix-ide-plugin.md +++ /dev/null @@ -1,159 +0,0 @@ ---- -title: "Use Remix IDE Plugin" -slug: "remix-ide-plugin" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Use Remix IDE Plugin - -This tutorial explains how to deploy and run Move modules with the [WELLDONE Code Remix IDE](https://docs.welldonestudio.io/code) plugin. This tool offers a graphical interface for developing Move [modules](../../move/book/modules-and-scripts.md#modules). - -Here are the steps to use the Remix IDE plugin for Move (described in detail below): - -1. [Connect to Remix IDE](#step-1-connect-to-remix-ide). -2. [Select a chain](#step-2-select-a-chain). -3. [Install a browser extension wallet](#step-3-install-a-wallet). -4. [Create the project](#step-4-create-the-project). -5. [Compile and publish a Move module to the Aptos blockchain](#step-5-compile-and-publish-a-move-module-to-the-aptos-blockchain). -6. [Interact with a Move module](#step-6-interact-with-a-move-module). - -## Step 1: Connect to Remix IDE - -1. Load the [Remix IDE](https://remix.ethereum.org/). - -2. Accept or decline the personal information agreement and dismiss any demonstrations. 
- -3. Click the **Plugin Manager** button near the bottom left, search for *CODE BY WELLDONE STUDIO*, and click **Activate**. - -
- -
- -## Step 2: Select a Chain - -Click the newly created icon at the bottom of the left menu. Then, select **Aptos (MoveVM)** from the chain list. - -
- -
- -## Step 3: Install a wallet - -WELLDONE Wallet can be used with the Remix IDE plugin now, with support for [Petra wallet](https://petra.app/) coming soon. See the list of [Aptos wallets](https://github.com/aptos-foundation/ecosystem-projects#wallets) available in the ecosystem. - -This steps assumes you are using the WELLDONE Wallet. Follow [the manual](https://docs.welldonestudio.io/wallet/manual/) to install the wallet and create an account for the Aptos blockchain. Once that is done, follow these steps: - -1. Choose a network (e.g. devnet) in the dropdown menu at the top of the main tab. -1. Go into the **Settings** tab of your wallet and activate **Developer Mode**. - -Now in the Remix UI click the **Connect to WELLDONE** button to connect to the **WELLDONE Wallet**. - -Click the **Refresh** button in the upper right corner of the plug-in to apply changes to your wallet. - -## Step 4: Create the Project - -In Aptos, you can write smart contracts with the [Move programming language](../../move/move-on-aptos.md). **WELLDONE Code** provides two features to help developers new to Aptos and Move. - -### Select a template - -Create simple example contract code written in Move. You can create a sample contract by selecting the *template* option and clicking the **Create** button. - -
- -
- -### Create a new project - -Automatically generate the Move module structure. Write a name for the project, and click the **Create** button to create a Move module structure. - -:::info -You can create your own Move projects without using the features above. However, for the Remix IDE plugin to build and deploy the Move module, it must be built within the directory `aptos/`. If you start a new project, the structure should resemble: -::: - - ``` - aptos - └── - ├── Move.toml - └── sources - └── YOUR_CONTRACT_FILE.move - ``` - -## Step 5: Compile and publish a Move module to the Aptos blockchain - -1. Select the project you want to compile in the **PROJECT TO COMPILE** section. -2. Add your address to the `Move.toml` file. -3. Click the `Compile` button. - -```toml -[package] -name = "Examples" -version = "0.0.0" - -[addresses] -hello_blockchain = "your address" - -[dependencies] -AptosFramework = { git = "https://github.com/aptos-labs/aptos-core.git", subdir = "aptos-move/framework/aptos-framework/", rev = "aptos-node-v1.2.0" } -``` - -4. When the compilation is complete, a compiled binary file is returned in the `aptos//out` directory. - -If you need to revise the contract and compile again, delete the `out` directory and click **Compile** once more. - -5. Once you have compiled contract code, the `Deploy` button will be activated. - -## Step 6: Interact with a Move module - -:::info -There are two ways to import contracts. -1. Automatically import contracts deployed through the above process. -2. Import existing deployed contracts through the **At Address** button. -::: - -1. Check the modules and resources owned by the current account and read the resources through the **Get Resource** button. -2. You can select a function, enter parameters as needed, and click a button to run the function. For an entry function - not the view function - a signature from the WELLDONE Wallet is required because the transaction signature and request are required. - -
- - - -
- -## Get support - -Click the **Documentation** button to seek help with this Remix IDE plugin. To file requests, click the **Make an issue** button to go to the [welldonestudio](https://github.com/welldonestudio/welldonestudio.github.io) GitHub Repository and [file an issue](https://github.com/welldonestudio/welldonestudio.github.io/issues/new/choose). diff --git a/developer-docs-site/docs/community/contributors.md b/developer-docs-site/docs/community/contributors.md deleted file mode 100644 index 5bd5a5cf9c6a3..0000000000000 --- a/developer-docs-site/docs/community/contributors.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "Contributors" -hide_table_of_contents: true -hide_title: true ---- -import Contributors from "@site/src/components/Contributors"; - - diff --git a/developer-docs-site/docs/community/external-resources.md b/developer-docs-site/docs/community/external-resources.md deleted file mode 100644 index 98b33008b8e43..0000000000000 --- a/developer-docs-site/docs/community/external-resources.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: "External Resources" -slug: "external-resources" ---- - -# Aptos External Resources - -:::caution Proceed with caution -This page links to third-party contents. Aptos neither endorses nor supports these contributions. Nor can we guarantee their effects. -::: - -This page contains links to external resources supplied by the Aptos community. These may be useful, technical posts to the [Aptos Forum](https://forum.aptoslabs.com/) or links to Aptos-related technologies documented elsewhere. - -To add your own resource, click **Edit this page** at the bottom, add your resource in Markdown, and create a pull request for review. 
- -## Tools - -| Contribution | Description | Author | Date added/updated | -| --- | --- | --- | --- | -| [Aptos Staking Dashboard](https://dashboard.stakeaptos.com) · [Repo](https://github.com/pakaplace/swtb-frontend/) | A dashboard to monitor your Aptos validator performance, view network stats, or request delegation commissions. By [Paymagic Labs](https://paymagic.xyz/). | [pakaplace](https://github.com/pakaplace/) | 2023-03-10 | -| [Aptos Validator/Staking Postman Collection](https://github.com/pakaplace/aptos-validator-staking-postman) | A Postman collection for querying staking pool, staking contract, and account resources/events. | [pakaplace](https://github.com/pakaplace/) | 2023-03-10 | -| [One-stop solution for Aptos node monitoring](https://github.com/LavenderFive/aptos-monitoring) | A monitoring solution for Aptos nodes utilizing Docker containers with Prometheus, Grafana, cAdvisor, NodeExporter, and alerting with AlertManager. | [Lavender.Five Nodes](https://github.com/LavenderFive) | 2023-03-10 | -| [Monitor Your Aptos validator and validator fullnodes with Prometheus and Grafana](https://github.com/RhinoStake/aptos_monitoring) | A full-featured Grafana/Prometheus dashboard to monitor key infrastructure, node, and chain-related metrics and data relationships. | [RHINO](https://rhinostake.com) | 2023-03-10 | - -## Tutorials - -| Contribution | Description | Author | Date added/updated | -| --- | --- | --- | --- | -| [Alerts integration on your validator/full node](https://forum.aptoslabs.com/t/alerts-integration-on-your-validator-full-node/196210) | Explains how to integrate alerts on your validator (fullnode). | [cryptomolot](https://forum.aptoslabs.com/u/unlimitedmolot) | 2023-06-11 | -| [Tools to monitor your validator](https://forum.aptoslabs.com/t/tools-to-monitore-your-validator/197163) | Explains what tools to use to monitor your validator (fullnode). 
| [cryptomolot](https://forum.aptoslabs.com/u/unlimitedmolot) and [p1xel32](https://forum.aptoslabs.com/u/p1xel32) | 2023-06-11 | -| [How to join validator set via snapshot](https://forum.aptoslabs.com/t/how-to-join-validator-set-via-snapshot/207568) | Demonstrates a method to join a validator set with a snapshot. | [cryptomolot](https://forum.aptoslabs.com/u/unlimitedmolot) | 2023-06-11 | -| [Alerts for your validator via Telegram public](https://forum.aptoslabs.com/t/alerts-for-your-validator-via-telegram-public/201959) | Demonstrates a useful method for receiving alerts. | [cryptomolot](https://forum.aptoslabs.com/u/unlimitedmolot) | 2023-06-11 | -| [Ansible playbook for Node Management (Bare Metal)](https://github.com/RhinoStake/ansible-aptos) | This Ansible Playbook is for the initialization, configuration, planned and hotfix upgrades of Aptos Validators, VFNs and PFNs on bare metal servers. | [RHINO](https://rhinostake.com) | 2023-03-14 | -| [Ansible playbook for Node Management (Docker)](https://github.com/LavenderFive/aptos-ansible) | This Ansible Playbook is intended for node management, including initial launch and handling upgrades of nodes. | [Lavender.Five Nodes](https://github.com/LavenderFive) | 2023-03-13 | -| [Write Your First Smart Contract On Aptos](https://medium.com/mokshyaprotocol/write-your-first-smart-contract-on-aptos-a-step-by-step-guide-e16a6f5c2be6) | This blog is created to help you start writing smart contracts in Aptos Blockchain. | [Samundra Karki](https://medium.com/@samundrakarki56), [MokshyaProtocol](https://mokshya.io/) | 2023-02-27 | -| [Transfer validator node to other server (no FN required)](https://forum.aptoslabs.com/t/transfer-validator-node-to-other-server-no-fn-required/194629/1) | Shows how to transfer a validator node to another server without using an intermediate full node server. 
| [p1xel32](https://forum.aptoslabs.com/u/p1xel32) | 2023-02-03 | -| [Failover and migrate Validator Nodes for less downtime](https://forum.aptoslabs.com/t/failover-and-migrate-validator-nodes-for-less-downtime/144846) | Explains how to hot swap a validator node with a validator full node with Docker setup and inspired the generic [Update Aptos Validator Node via Failover](../nodes/validator-node/operator/update-validator-node.md). | [guguru](https://forum.aptoslabs.com/u/guguru) | 2022-11-22 | diff --git a/developer-docs-site/docs/community/index.md b/developer-docs-site/docs/community/index.md deleted file mode 100644 index f079aa3e38052..0000000000000 --- a/developer-docs-site/docs/community/index.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -title: "Contribute to the Aptos Ecosystem" -slug: ./ ---- - -# Contribute to the Aptos Ecosystem - -We welcome your own [contributions](https://github.com/aptos-labs/aptos-core/blob/main/CONTRIBUTING.md) to the [Aptos](https://aptosfoundation.org/currents) blockchain and this site! Aptos exists to foster an inclusive ecosystem. This page describes ways you can help, while the other pages in this section highlight our community's contributions. - -As always, adhere to the [Aptos Code of Conduct](https://github.com/aptos-labs/aptos-core/blob/main/CODE_OF_CONDUCT.md) when taking part in our ecosystem. - -## Ask questions and offer answers - -Join [Aptos Discord](https://discord.gg/aptosnetwork) to speak with developers and hop into the Aptos community. It's the best way to keep up to date with news and developments in the Aptos universe. Be sure to check pinned messages in the channels - this is where we like to post topic-specific links, events, and more. - -For technical questions, we recommend [Stack Overflow](https://stackoverflow.com/questions/tagged/aptos) so anyone in the world may search for, benefit from, and upvote questions and answers in a persistent location. 
To offer your own advice and find tips from others, post to and use the [Aptos Forum](https://forum.aptoslabs.com/). - -Please remember, community managers will never message or DM you first, and they will never ask you to send them money or share any sensitive, private, or personal information. If this happens to you, please report it to us in [Aptos Discord](https://discord.gg/aptosnetwork). - -## Report issues, request enhancements - -Review and upvote [existing issues](https://github.com/aptos-labs/aptos-core/issues) in the Aptos blockchain. - -File [new issues](https://github.com/aptos-labs/aptos-core/issues/new/choose) to report problems or request enhancements. For security concerns, instead follow the [Aptos Core Bug Bounty](https://github.com/aptos-labs/aptos-core/blob/main/SECURITY.md) process. - -Here are the primary bug queues: - -* [Bug report](https://github.com/aptos-labs/aptos-core/issues/new?assignees=&labels=bug&template=bug_report.md&title=%5BBug%5D) - Create a bug report to help improve Aptos Core. -* [DevEx RFC](https://github.com/aptos-labs/aptos-core/issues/new?assignees=&labels=DevEx&template=devex_rfc.md&title=%5BDevEx+RFC%5D+) - Open a Request for Comments (RFC) for DevEx improvements. -* [Documentation/aptos.dev Bug report](https://github.com/aptos-labs/aptos-core/issues/new?assignees=clay-aptos&labels=bug%2Cdocumentation&template=documentation_bug_report.md&title=%5BDocs%5D) - Create a bug report to help improve the Aptos Developers website. -* [Feature request](https://github.com/aptos-labs/aptos-core/issues/new?assignees=&labels=enhancement&template=feature_request.md&title=%5BFeature+Request%5D) - Suggest a new feature in Aptos Core. - -## Develop your own project - -See, employ and join the growing number of delightful [community-driven projects](https://github.com/aptos-foundation/ecosystem-projects) in the Aptos ecosystem. 
- -## Become an Aptos ambassador - -Help organize events, develop content and more for the ecosystem by joining the [Aptos Collective](https://aptosfoundation.org/currents/join-the-aptos-collective) with plenty of perks in return! - -## Fix the source code - -We at Aptos love direct contributions in the form of [pull requests](https://github.com/aptos-labs/aptos-core/pulls). Help us make small fixes to code. Follow our coding conventions for: - -* [Move](../move/book/coding-conventions.md) -* [Rust](./rust-coding-guidelines.md) - -## Update the documentation - -You may report problems and supply other input in the [#docs-feedback](https://discord.com/channels/945856774056083548/1034215378299133974) channel of [Aptos Discord](https://discord.gg/aptosnetwork). To help with our contents, follow [Update Aptos.dev](./site-updates.md). - diff --git a/developer-docs-site/docs/community/rust-coding-guidelines.md b/developer-docs-site/docs/community/rust-coding-guidelines.md deleted file mode 100644 index d6907b5a8821c..0000000000000 --- a/developer-docs-site/docs/community/rust-coding-guidelines.md +++ /dev/null @@ -1,367 +0,0 @@ ---- -id: rust-coding-guidelines -title: Rust Coding Guidelines ---- - -This document describes the coding guidelines for the [Aptos Core](https://github.com/aptos-labs/aptos-core) Rust codebase. For the Move language, see the [Move Coding Conventions](../move/book/coding-conventions.md). - -## Code formatting - -All code formatting is enforced with [rustfmt](https://github.com/rust-lang/rustfmt) with a project-specific configuration. Below is an example command to adhere to the Aptos Core project conventions. - -``` -cargo fmt -``` - -## Code analysis - -[Clippy](https://github.com/rust-lang/rust-clippy) is used to catch common mistakes and is run as a part of continuous integration. 
Before submitting your code for review, you can run clippy with our configuration: - -``` -cargo xclippy --all-targets -``` - -In general, we follow the recommendations from [rust-lang](https://rust-lang.github.io/api-guidelines/about.html) and [The Rust Programming Language](https://doc.rust-lang.org/book/). The remainder of this guide provides detailed guidelines on specific topics in order to achieve uniformity of the codebase. - -## Code documentation - -Any public fields, functions, and methods should be documented with [Rustdoc](https://doc.rust-lang.org/book/ch14-02-publishing-to-crates-io.html#making-useful-documentation-comments). - - Please follow the conventions as detailed below for modules, structs, enums, and functions. The *single line* is used as a preview when navigating Rustdoc. As an example, see the 'Structs' and 'Enums' sections in the [collections](https://doc.rust-lang.org/std/collections/index.html) Rustdoc. - - ``` - /// [Single line] One line summary description - /// - /// [Longer description] Multiple lines, inline code - /// examples, invariants, purpose, usage, etc. - [Attributes] If attributes exist, add after Rustdoc - ``` - -Example below: - -```rust -/// Represents (x, y) of a 2-dimensional grid -/// -/// A line is defined by 2 instances. -/// A plane is defined by 3 instances. -#[repr(C)] -struct Point { - x: i32, - y: i32, -} -``` - -### Terminology - -The Aptos codebase uses inclusive terminology (similar to other projects such as [the Linux kernel](https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=49decddd39e5f6132ccd7d9fdc3d7c470b0061bb)). The terms below are recommended when appropriate. -* allowlist - a set of entities allowed access -* blocklist - a set of entities that are blocked from access -* primary/leader/main - a primary entity -* secondary/replica/follower - a secondary entity - -### Constants and fields - -Describe the purpose and definition of this data. 
If the unit is a measurement of time, include it, e.g., `TIMEOUT_MS` for timeout in milliseconds. - -### Functions and methods - -Document the following for each function: - -* The action the method performs - “This method *adds* a new transaction to the mempool.” Use *active voice* and *present tense* (i.e. adds/creates/checks/updates/deletes). -* Describe how and why to use this method. -* Any condition that must be met _before_ calling the method. -* State conditions under which the function will `panic!()` or returns an `Error` -* Brief description of return values. -* Any special behavior that is not obvious - -### README.md for top-level directories and other major components - -Each major component of Aptos Core needs to have a `README.md` file. Major components are: -* top-level directories (e.g. `aptos-core/network`, `aptos-core/language`) -* the most important crates in the system (e.g. `vm-runtime`) - -This file should contain: - - * The *conceptual* *documentation* of the component. - * A link to the external API documentation for the component. - * A link to the main license of the project. - * A link to the main contributing guide for the project. - -A template for readmes: - -```markdown -# Component Name - -[Summary line: Start with one sentence about this component.] - -## Overview - -* Describe the purpose of this component and how the code in -this directory works. -* Describe the interaction of the code in this directory with -the other components. -* Describe the security model and assumptions about the crates -in this directory. Examples of how to describe the security -assumptions will be added in the future. - -## Implementation Details - -* Describe how the component is modeled. For example, why is the - code organized the way it is? -* Other relevant implementation details. - -## API Documentation - -For the external API of this crate refer to [Link to rustdoc API]. - -[For a top-level directory, link to the most important APIs within.] 
- -## Contributing - -Refer to the Aptos Project contributing guide [LINK]. - -## License - -Refer to the Aptos Project License [LINK]. -``` - -A good example of README.md is `aptos-core/network/README.md` that describes the networking crate. - -## Binary, Argument, and Crate Naming - -Most tools that we use everyday (rustc, cargo, git, rg, etc.) use dashes `-` as -a separator for binary names and arguments and the [GNU software -manual](https://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html) -dictates that long options should "consist of `--` followed by a name made of -alphanumeric characters and dashes". As such dashes `-` should be used as -separators in both binary names and command line arguments. - -In addition, it is generally accepted by many in the Rust community that dashes -`-` should be used as separators in crate names, i.e. `x25519-dalek`. - -## Code suggestions - -In the following sections, we have suggested some best practices for a uniform codebase. We will investigate and identify the practices that can be enforced using Clippy. This information will evolve and improve over time. - -### Attributes - -Make sure to use the appropriate attributes for handling dead code: - -``` -// For code that is intended for production usage in the future -#[allow(dead_code)] -// For code that is only intended for testing and -// has no intended production use -#[cfg(test)] -``` - -### Avoid Deref polymorphism - -Don't abuse the Deref trait to emulate inheritance between structs, and thus reuse methods. For more information, read [Deref polymorphism](https://rust-unofficial.github.io/patterns/anti_patterns/deref.html). - -### Comments - -We recommend that you use `//` and `///` comments rather than block comments `/* ... */` for uniformity and simpler grepping. - -### Concurrent types - -Concurrent types such as [`CHashMap`](https://docs.rs/crate/chashmap), [`AtomicUsize`](https://doc.rust-lang.org/std/sync/atomic/struct.AtomicUsize.html), etc. 
have an immutable borrow on self i.e. `fn foo_mut(&self,...)` in order to support concurrent access on interior mutating methods. Good practices (such as those in the examples mentioned) avoid exposing synchronization primitives externally (e.g. `Mutex`, `RwLock`) and document the method semantics and invariants clearly. - -*When to use channels vs concurrent types?* - -Listed below are high-level suggestions based on experience: - -* Channels are for ownership transfer, decoupling of types, and coarse-grained messages. They fit well for transferring ownership of data, distributing units of work, and communicating async results. Furthermore, they help break circular dependencies (e.g. `struct Foo` contains an `Arc` and `struct Bar` contains an `Arc` that leads to complex initialization). - -* Concurrent types (e.g. such as [`CHashMap`](https://docs.rs/crate/chashmap) or structs that have interior mutability building on [`Mutex`](https://doc.rust-lang.org/std/sync/struct.Mutex.html), [`RwLock`](https://doc.rust-lang.org/std/sync/struct.RwLock.html), etc.) are better suited for caches and states. - -### Error handling - -Error handling suggestions follow the [Rust book guidance](https://doc.rust-lang.org/book/ch09-00-error-handling.html). Rust groups errors into two major categories: recoverable and unrecoverable errors. Recoverable errors should be handled with [Result](https://doc.rust-lang.org/std/result/). Our suggestions on unrecoverable errors are listed below: - -*Fallible functions* - -* `duration_since_epoch()` - to obtain the unix time, call the function provided by `aptos-infallible`. -* `RwLock` and `Mutex` - Instead of calling `unwrap()` on the standard library implementations of these functions, use the infallible equivalent types that we provide in `aptos-infallible`. - -*Panic* - -* `unwrap()` - Unwrap should only be used for test code. For all other use cases, prefer `expect()`. 
The only exception is if the error message is custom-generated, in which case use `.unwrap_or_else(|| panic!("error: {}", foo))`. -* `expect()` - Expect should be invoked when a system invariant is expected to be preserved. `expect()` is preferred over `unwrap()` and should contain a detailed error message on failure in most cases. -* `assert!()` - This macro is kept in both debug/release and should be used to protect invariants of the system as necessary. -* `unreachable!()` - This macro will panic on code that should not be reached (violating an invariant) and can be used where appropriate. - -In production (non-test) code, outside of lock management, all unrecoverable errors should be cleanly documented describing why said event is unrecoverable. For example, if the system is now in a bad state, state what that state is and the motivation for why a crash / restart is more effective than resolving it within a running system, and what if any steps an operator would need to take to resolve the issue. - -### Generics - -Generics allow dynamic behavior (similar to [`trait`](https://doc.rust-lang.org/book/ch10-02-traits.html) methods) with static dispatch. As the number of generic type parameters increases, the difficulty of using the type/method also increases (e.g. consider the combination of trait bounds required for this type, duplicate trait bounds on related types, etc.). In order to avoid this complexity, we generally try to avoid using a large number of generic type parameters. We have found that converting code with a large number of generic objects to trait objects with dynamic dispatch often simplifies our code. - -### Getters/setters - -In general, we follow naming recommendations for getters as specified [here](https://rust-lang.github.io/api-guidelines/naming.html#getter-names-follow-rust-convention-c-getter) and for setters as defined [here](https://github.com/rust-lang/rfcs/blob/master/text/0344-conventions-galore.md#gettersetter-apis). 
- -Getters/setters should be avoided for [`struct`](https://doc.rust-lang.org/book/ch05-00-structs.html) types in the C spirit: compound, passive data structures without internal invariants. Adding them only increases the complexity and number of lines of code without improving the developer experience. - -```rust -struct Foo { - size: usize, - key_to_value: HashMap -} -impl Foo { - /// Simple getter follows xxx pattern - fn size(&self) -> usize { - self.size - } - /// Setter follows set_xxx pattern - fn set_foo(&mut self, size: usize){ - self.size = size; - } - /// Complex getter follows get_xxx pattern - fn get_value(&self, key: u32) -> Option<&u32> { - self.key_to_value.get(&key) - } -} -``` - -### Integer Arithmetic - -As every integer operation (`+`, `-`, `/`, `*`, etc.) implies edge-cases (e.g. overflows `u64::MAX + 1`, underflows `0u64 -1`, division by zero, etc.), -we use checked arithmetic instead of directly using math symbols. -It forces us to think of edge-cases, and handle them explicitely. -This is a brief and simplified mini guide of the different functions that exist to handle integer arithmetic: - -* [checked_](https://doc.rust-lang.org/std/primitive.u32.html#method.checked_add): use this function if you want to handle overflows and underflows as a special edge-case. It returns `None` if an underflow or overflow has happened, and `Some(operation_result)` otherwise. -* [overflowing_](https://doc.rust-lang.org/std/primitive.u32.html#method.overflowing_add): use this function if you want the result of an overflow to potentially wrap around (e.g. `u64::MAX.overflow_add(10) == (9, true)`). It returns the underflowed or overflowed result as well as a flag indicating if an overflow has occured or not. -* [wrapping_](https://doc.rust-lang.org/std/primitive.u32.html#method.wrapping_add): this is similar to overflowing operations, except that it returns the result directly. 
Use this function if you are sure that you want to handle underflows and overflows by wrapping around. -* [saturating_](https://doc.rust-lang.org/std/primitive.u32.html#method.saturating_add): if an overflow occurs, the result is kept within the boundary of the type (e.g. `u64::MAX.saturating_add(1) == u64::MAX`). - -### Logging - -We currently use [log](https://docs.rs/log/) for logging. - -* [error!](https://docs.rs/log/latest/log/macro.error.html) - Error-level messages have the highest urgency in [log](https://docs.rs/log/). An unexpected error has occurred (e.g. exceeded the maximum number of retries to complete an RPC or inability to store data to local storage). -* [warn!](https://docs.rs/log/latest/log/macro.warn.html) - Warn-level messages help notify admins about automatically handled issues (e.g. retrying a failed network connection or receiving the same message multiple times, etc.). -* [info!](https://docs.rs/log/latest/log/macro.info.html) - Info-level messages are well suited for "one-time" events (such as logging state on one-time startup and shutdown) or periodic events that are not frequently occurring - e.g. changing the validator set every day. -* [debug!](https://docs.rs/log/latest/log/macro.debug.html) - Debug-level messages can occur frequently (i.e. potentially > 1 message per second) and are not typically expected to be enabled in production. -* [trace!](https://docs.rs/log/latest/log/macro.trace.html) - Trace-level logging is typically only used for function entry/exit. - -### Testing - -*Unit tests* - -We follow the general guidance provided [here](https://doc.rust-lang.org/book/ch11-03-test-organization.html). Ideally, all code should be unit tested. 
Unit tests should be in the same file as the code it is testing though in a distinct module, using the following syntax: - -```rust -struct Foo { -} -impl Foo { - pub fn magic_number() -> u8 { - 42 - } -} -#[cfg(test)] -mod tests { - #test - fn verify_magic_number() { - assert_eq!(Foo::magic_number(), 42); - } -} -``` - -*Property-based tests* - -Aptos contains [property-based tests](https://blog.jessitron.com/2013/04/25/property-based-testing-what-is-it/) written in Rust using the [`proptest` framework](https://github.com/AltSysrq/proptest). Property-based tests generate random test cases and assert that invariants, also called *properties*, hold for the code under test. - -Some examples of properties tested in Aptos: - -* Every serializer and deserializer pair is tested for correctness with random inputs to the serializer. Any pair of functions that are inverses of each other can be tested this way. -* The results of executing common transactions through the VM are tested using randomly generated scenarios and verified with an *Oracle*. - -A tutorial for `proptest` can be found in the [`proptest` book](https://altsysrq.github.io/proptest-book/proptest/getting-started.html). - -References: - -* [What is Property Based Testing?](https://hypothesis.works/articles/what-is-property-based-testing/) (includes a comparison with fuzzing) -* [An introduction to property-based testing](https://fsharpforfunandprofit.com/posts/property-based-testing/) -* [Choosing properties for property-based testing](https://fsharpforfunandprofit.com/posts/property-based-testing-2/) - -*Fuzzing* - -Aptos contains harnesses for fuzzing crash-prone code like deserializers, using [`libFuzzer`](https://llvm.org/docs/LibFuzzer.html) through [`cargo fuzz`](https://rust-fuzz.github.io/book/cargo-fuzz.html). For more examples, see the `testsuite/aptos_fuzzer` directory. 
- -### Conditional compilation of tests - -Aptos [conditionally -compiles](https://doc.rust-lang.org/stable/reference/conditional-compilation.html) -code that is *only relevant for tests, but does not consist of tests* (unitary -or otherwise). Examples of this include proptest strategies, implementations -and derivations of specific traits (e.g. the occasional `Clone`), helper -functions, etc. Since Cargo is [currently not equipped for automatically activating features -in tests/benchmarks](https://github.com/rust-lang/cargo/issues/2911), we rely on two -conditions to perform this conditional compilation: -- the test flag, which is activated by dependent test code in the same crate - as the conditional test-only code. -- the `fuzzing` custom feature, which is used to enable fuzzing and testing -related code in downstream crates. Note that this must be passed explicitly to -`cargo xtest` and `cargo x bench`. Never use this in `[dependencies]` unless -the crate is only for testing. - -As a consequence, it is recommended that you set up your test-only code in the following fashion. - -**For production crates:** - -Production crates are defined as the set of crates that create externally published artifacts, e.g. the Aptos validator, -the Move compiler, and so on. - -For the sake of example, we'll consider you are defining a test-only helper function `foo` in `foo_crate`: - -1. Define the `fuzzing` flag in `foo_crate/Cargo.toml` and make it non-default: - ```toml - [features] - default = [] - fuzzing = [] - ``` -2. Annotate your test-only helper `foo` with both the `test` flag (for in-crate callers) and the `"fuzzing"` custom feature (for out-of-crate callers): - ```rust - #[cfg(any(test, feature = "fuzzing"))] - fn foo() { ... } - ``` -3. (optional) Use `cfg_attr` to make test-only trait derivations conditional: - ```rust - #[cfg_attr(any(test, feature = "fuzzing"), derive(FooTrait))] - #[derive(Debug, Display, ...)] // unconditional derivations - struct Foo { ... 
} - ``` -4. (optional) Set up feature transitivity for crates that call crates that have test-only members. Let's say it's the case of `bar_crate`, which, through its test helpers, calls into `foo_crate` to use your test-only `foo`. Here's how you would set up `bar_crate/Cargo.toml`: - ```toml - [features] - default = [] - fuzzing = ["foo_crate/fuzzing"] - ``` - -**For test-only crates:** - -Test-only crates do not create published artifacts. They consist of tests, benchmarks or other code that verifies -the correctness or performance of published artifacts. Test-only crates are -explicitly listed in `x.toml` under `[workspace.test-only]`. - -These crates do not need to use the above setup. Instead, they can enable the `fuzzing` feature in production crates -directly. - -```toml -[dependencies] -foo_crate = { path = "...", features = ["fuzzing"] } -``` - -*A final note on integration tests*: All tests that use conditional test-only -elements in another crate need to activate the "fuzzing" feature through the -`[features]` section in their `Cargo.toml`. [Integration -tests](https://doc.rust-lang.org/rust-by-example/testing/integration_testing.html) -can neither rely on the `test` flag nor do they have a proper `Cargo.toml` for -feature activation. In the Aptos codebase, we therefore recommend that -*integration tests which depend on test-only code in their tested crate* be -extracted to their own test-only crate. See `language/move-binary-format/serializer_tests` -for an example of such an extracted integration test. - -*Note for developers*: The reason we use a feature re-export (in the `[features]` section of the `Cargo.toml`) is that a profile is not enough to activate the `"fuzzing"` feature flag. See [cargo-issue #2911](https://github.com/rust-lang/cargo/issues/2911) for details. 
diff --git a/developer-docs-site/docs/community/site-updates.md b/developer-docs-site/docs/community/site-updates.md deleted file mode 100644 index 60a640caac377..0000000000000 --- a/developer-docs-site/docs/community/site-updates.md +++ /dev/null @@ -1,89 +0,0 @@ ---- -title: "Update Aptos.dev" -slug: "site-updates" ---- - -# Update Aptos.dev - -As an open source project, Aptos needs your knowledge to grow. Follow the instructions on this page to update [Aptos.dev](https://aptos.dev/), the developer website for the Aptos blockchain. Every contributor to Aptos.dev is listed as an *author* on the pages they edit and update. See the *Authors* list at the bottom of any page for an example. - -See the [Aptos Docs](https://github.com/orgs/aptos-labs/projects/14/views/1) project for open issues by status. See detailed instructions for making updates below. - -## tl;dr - -Simply click **Edit this page** at the bottom of any location to go to the source and trigger editing there. The contents are in [Markdown](https://www.markdownguide.org/basic-syntax/) format. You may then edit in browser and use the *Preview* function to view your changes. - -Here are the basic steps for editing in your web browser: - -1. Click **Edit this page** at the bottom to get started. -2. Modify and add source Markdown files in the [developer-docs-site](https://github.com/aptos-labs/aptos-core/tree/main/developer-docs-site) directory. -3. See your changes in Netlify (by swapping `prnumber` in): - [https://deploy-preview-prnumber--aptos-developer-docs.netlify.app/](https://deploy-preview-prnumber--aptos-developer-docs.netlify.app/) -4. Have at least two verified reviewers examine and test the change. -5. Merge in the change and see it go live. - -For more complex documentation updates, we recommend [forking the repository](https://github.com/aptos-labs/aptos-core/blob/main/CONTRIBUTING.md#developer-workflow) and using a local editor to make changes. 
To edit at the command line and preview your changes on your localhost, see our [Developer Documentation](https://github.com/aptos-labs/aptos-core/blob/main/developer-docs-site/README.md) README. - -When ready, [start a pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request) with your changes. We will get back to you shortly. - - -## Supporting resources - -The Aptos Docs recommends these materials for good documentation: - -- [Aptos Style](./aptos-style.md) - A brief set of guidance for contributions to Aptos.dev. -- [Google Style Guide](https://developers.google.com/style) - A Google standard adopted by companies large and small. -- [Technical writing courses](https://developers.google.com/tech-writing) - Google offers basic courses on tech writing for engineers and others. -- [DITA](https://en.wikipedia.org/wiki/Darwin_Information_Typing_Architecture) - The Aptos Docs team adheres to the [Darwin Information Typing Architecture](https://en.wikipedia.org/wiki/Darwin_Information_Typing_Architecture) whereby all technical documentation is broken down into concepts (overviews), tasks (procedures), and references (lists) to best suit our audiences and their mindsets (learning, doing, finding) at the time of reading. -- [Open source templates](https://gitlab.com/tgdp/templates) - The [Good Docs Project](https://thegooddocsproject.dev/) gives us myriad templates in Markdown for various documentation types we should take advantage of in Aptos.dev. - -## Make updates directly - -Whenever possible, update [Aptos.dev](http://Aptos.dev) directly to reflect your changes to development. This might be as simple as changing a value or as complex as adding an entirely new page or set of pages. - -To update [Aptos.dev](http://Aptos.dev) directly: - -1. 
Trigger an edit to the source files in the [developer-docs-site](https://github.com/aptos-labs/aptos-core/tree/main/developer-docs-site) directory: - 1. In web browser: - * for simple, one-page changes, use the ***Edit this page*** link on the bottom of any page to access the source Markdown file in GitHub: - ![v-fn-network.svg](../../static/img/docs/trigger-edits-aptosdev.png) - Then click the pencil icon and select **Edit this file** to work in the GitHub web editor, and create a pull request to have it reviewed: - ![v-fn-network.svg](../../static/img/docs/edit-file-in-GH.png) - * To add a new page, navigate to the relevant subdirectory of the [developer-docs-site/docs/](https://github.com/aptos-labs/aptos-core/tree/main/developer-docs-site/docs/) directory, click **Add file**, give it a name, append the `.md` file extension, include your contents, and create a pull request to have it reviewed: - ![v-fn-network.svg](../../static/img/docs/add-file-in-GH.png) - 2. Via local editor - for more complex, multi-page changes, use your preferred source code editor to navigate to and update the source Markdown files in GitHub. See our [CONTRIBUTING](https://github.com/aptos-labs/aptos-core/blob/main/CONTRIBUTING.md) README for `git clone` instructions. -2. For web edits, use the *Preview* function at top to see your updates in browser. -3. For local edits, use the [local doc build instructions](https://github.com/aptos-labs/aptos-core/blob/main/developer-docs-site/README.md) to see your updates at: [http://localhost:3000](http://localhost:3000) -4. After creating the pull request, use the *Deploy Preview* in Netlify to see your updates made in web browser or via local editor by replacing the *prnumber* with your own in: -[https://deploy-preview-prnumber--aptos-developer-docs.netlify.app/](https://deploy-preview-prnumber--aptos-developer-docs.netlify.app/) -5. Have at least two verified reviewers review and test your changes. -6. Make direct commits during review. -7. 
Request review from the Docs team (currently, clay-aptos in GitHub). -8. Use the *Assignee* field in the PR to identify the review the change is blocking upon. -9. Receive and address *all feedback*. -10. Get approval from at least two verified reviewers. -11. Merge in the change. -12. Monitor builds at: [https://app.netlify.com/sites/aptos-developer-docs/overview](https://app.netlify.com/sites/aptos-developer-docs/overview) - -## Request docs changes - -If you are unable to make the update yourself or simply need Docs team help along the way: - -1. See the existing list of [open issues tagged as Documentation](https://github.com/aptos-labs/aptos-core/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation) in GitHub. -2. If one does not exist, file a new [Documentation issue](https://github.com/aptos-labs/aptos-core/issues/new?assignees=clay-aptos&labels=bug%2Cdocumentation&template=documentation_bug_report.md&title=%5BDocs%5D). -3. Answer all relevant questions/sections in the bug template (such as URL to the affected page). -4. Set a priority for the doc issue: - 1. [P0](https://github.com/aptos-labs/aptos-core/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation+label%3Ap0+) - critical and urgent - 2. [P1](https://github.com/aptos-labs/aptos-core/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation+label%3Ap1+) - important and needed soon - 3. [P2](https://github.com/aptos-labs/aptos-core/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation+label%3Ap2+) - can wait for this; still dependent on other work - 4. [P3](https://github.com/aptos-labs/aptos-core/issues?q=is%3Aissue+is%3Aopen+label%3Adocumentation+label%3Ap3+) - back burner item; there is no urgency here -5. Explain in the issue precisely what is expected in the doc; what requirements must it meet? -6. Assign the issue to and work with the subject matter experts and the Docs team to generate new and updated materials. -7. 
Associate all related pull requests with the issue by adding the issue number to the *Development* field of each PR. -8. Re-open the issue when related PRs are merged and work is still needed. -9. Close the issue only when all relevant parties are satisfied with the work. - - - - - diff --git a/developer-docs-site/docs/concepts/_category_.json b/developer-docs-site/docs/concepts/_category_.json deleted file mode 100644 index daec3e16a89dc..0000000000000 --- a/developer-docs-site/docs/concepts/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Basics", - "position": 1 -} diff --git a/developer-docs-site/docs/concepts/accounts.md b/developer-docs-site/docs/concepts/accounts.md deleted file mode 100755 index 4df214323f514..0000000000000 --- a/developer-docs-site/docs/concepts/accounts.md +++ /dev/null @@ -1,159 +0,0 @@ ---- -title: "Accounts" -id: "accounts" ---- - -# Accounts - -An account on the Aptos blockchain represents access control over a set of assets including on-chain currency and NFTs. In Aptos, these assets are represented by a Move language primitive known as a **resource** that emphasizes both access control and scarcity. - -Each account on the Aptos blockchain is identified by a 32-byte account address. You can employ the [Aptos Name Service](../integration/aptos-name-service-connector.md) at [www.aptosnames.com](https://www.aptosnames.com/) to secure .apt domains for key accounts to make them memorable and unique. - -Different from other blockchains where accounts and addresses are implicit, accounts on Aptos are explicit and need to be created before they can execute transactions. The account can be created explicitly or implicitly by transferring Aptos tokens (APT) there. See the [Creating an account](#creating-an-account) section for more details. In a way, this is similar to other chains where an address needs to be sent funds for gas before it can send transactions. 
- -Explicit accounts allow first-class features that are not available on other networks such as: -* Rotating authentication key. The account's authentication key can be changed to be controlled via a different private key. This is similar to changing passwords in the web2 world. -* Native multisig support. Accounts on Aptos support k-of-n multisig using both Ed25519 and Secp256k1 ECDSA signature schemes when constructing the authentication key. - -There are three types of accounts on Aptos: - * *Standard account* - This is a typical account corresponding to an address with a corresponding pair of public/private keys. - * [*Resource account*](../move/move-on-aptos/resource-accounts.md) - An autonomous account without a corresponding private key used by developers to store resources or publish modules on-chain. - * [*Object*](../standards/aptos-object.md) - A complex set of resources stored within a single address representing a single entity. - -:::tip Account address example -Account addresses are 32-bytes. They are usually shown as 64 hex characters, with each hex character a nibble. -Sometimes the address is prefixed with a 0x. See the [Your First Transaction](../tutorials/first-transaction.md) for an example -of how an address appears, reproduced below: - -```text -Alice: 0xeeff357ea5c1a4e7bc11b2b17ff2dc2dcca69750bfef1e1ebcaccf8c8018175b -Bob: 0x19aadeca9388e009d136245b9a67423f3eee242b03142849eb4f81a4a409e59c -``` -::: - -## Account address - -Currently, Aptos supports only a single, unified identifier for an account. Accounts on Aptos are universally represented as a 32-byte hex string. A hex string shorter than 32-bytes is also valid; in those scenarios, the hex string can be padded with leading zeroes, e.g., `0x1` => `0x0000000000000...01`. While Aptos standards indicate leading zeroes may be removed from an Address, most applications attempt to eschew that legacy behavior and only support the removal of 0s for special addresses ranging from `0x0` to `0xa`. 
- -## Creating an account - -When a user requests to create an account, for example by using the [Aptos SDK](https://aptos-labs.github.io/ts-sdk-doc/classes/AptosAccount.html), the following steps are executed: - -- Select the authentication scheme for managing the user's account, e.g., Ed25519 or Secp256k1 ECDSA. -- Generate a new private key, public key pair. -- Combine the public key with the public key's authentication scheme to generate a 32-byte authentication key and the account address. - -The user should use the private key for signing the transactions associated with this account. - -## Account sequence number - -The sequence number for an account indicates the number of transactions that have been submitted and committed on-chain from that account. Committed transactions either execute with the resulting state changes committed to the blockchain or abort wherein state changes are discarded and only the transaction is stored. - -Every transaction submitted must contain a unique sequence number for the given sender's account. When the Aptos blockchain processes the transaction, it looks at the sequence number in the transaction and compares it with the sequence number in the on-chain account. The transaction is processed only if the sequence number is equal to or larger than the current sequence number. Transactions are only forwarded to other mempools or executed if there is a contiguous series of transactions from the current sequence number. Execution rejects out of order sequence numbers preventing replay attacks of older transactions and guarantees ordering of future transactions. - -## Authentication key - -The initial account address is set to the authentication key derived during account creation. However, the authentication key may subsequently change, for example when you generate a new public-private key pair, public keys to rotate the keys. An account address never changes. 
- -The Aptos blockchain supports the following authentication schemes: - -1. [Ed25519](https://ed25519.cr.yp.to/) -2. [Secp256k1 ECDSA](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-49.md) -3. [K-of-N multi-signatures](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-55.md) -4. A dedicated, now legacy, MultiEd25519 scheme - -:::note -The Aptos blockchain defaults to Ed25519 signature transactions. -::: - -### Ed25519 authentication - -To generate an authentication key and the account address for an Ed25519 signature: - -1. **Generate a key-pair**: Generate a fresh key-pair (`privkey_A`, `pubkey_A`). The Aptos blockchain uses the PureEdDSA scheme over the Ed25519 curve, as defined in RFC 8032. -2. **Derive a 32-byte authentication key**: Derive a 32-byte authentication key from the `pubkey_A`: - ``` - auth_key = sha3-256(pubkey_A | 0x00) - ``` - where `|` denotes concatenation. The `0x00` is the 1-byte single-signature scheme identifier. -3. Use this initial authentication key as the permanent account address. - -### MultiEd25519 authentication - -With K-of-N multisig authentication, there are a total of N signers for the account, and at least K of those N signatures -must be used to authenticate a transaction. - -To generate a K-of-N multisig account's authentication key and the account address: - -1. **Generate key-pairs**: Generate `N` ed25519 public keys `p_1`, ..., `p_n`. -2. Decide on the value of `K`, the threshold number of signatures needed for authenticating the transaction. -3. **Derive a 32-byte authentication key**: Compute the authentication key as described below: - ``` - auth_key = sha3-256(p_1 | . . . | p_n | K | 0x01) - ``` - The `0x01` is the 1-byte multisig scheme identifier. -4. Use this initial authentication key as the permanent account address. - -### Generalized authentication - -Generalized authentication supports both Ed25519 and Secp256k1 ECDSA. 
Like the previous authentication schemes, these schemes contain a scheme value, `0x02` and `0x03` for single and multikey respectively, but also each key contains a prefix value to indicate its key type: - -- **1-byte Ed25519 generalized scheme**: `0x00`, -- **1-byte Secp256k1 ECDSA generalized scheme**: `0x01`. - -For a single key Secp256k1 ECDSA account, using public key `pubkey`, the authentication key would be derived as follows: -``` -auth_key = sha3-256(0x01 | pubkey | 0x02) -``` -Where -* the first entry, `0x01`, represents the use of a Secp256k1 ECDSA key; -* the last entry, `0x02`, represents the authentication scheme. - -For a multi-key account containing a single Secp256k1 ECDSA public key, `pubkey_0`, and a single Ed25519 public key, `pubkey_1`, where one signature suffices, the authentication key would be derived as follows: -``` -auth_key = sha3-256(0x02 | 0x01 | pubkey_0 | 0x00 | pubkey_1 | 0x01 | 0x03) -``` -Where -* the first entry, `0x02`, represents the total number of keys as a single byte; -* the second to last entry, `0x01`, represents the required number of signatures as a single byte; -* the last entry, `0x03`, represents the authentication scheme. - -## Rotating the keys -An Account on Aptos has the ability to rotate keys so that potentially compromised keys cannot be used to access the accounts. Keys can be rotated via the `account::rotate_authentication_key` function. - -Refreshing the keys is generally regarded as good hygiene in the security field. However, this presents a challenge for system integrators who are used to using a mnemonic to represent both a private key and its associated account. To simplify this for the system integrators, Aptos provides an on-chain mapping via aptos account lookup-address. The on-chain data maps an effective account address as defined by the current mnemonic to the actual account address. 
- -For more information, see [`account.move`](https://github.com/aptos-labs/aptos-core/blob/a676c1494e246c31c5e96d3363d99e2422e30f49/aptos-move/framework/aptos-framework/sources/account.move#L274). - -## State of an account - -The state of each account comprises both the code (Move modules) and the data (Move resources). An account may contain an arbitrary number of Move modules and Move resources: - -- **Move modules**: Move modules contain code, for example, type and procedure declarations; but they do not contain data. A Move module encodes the rules for updating the Aptos blockchain's global state. -- **Move resources**: Move resources contain data but no code. Every resource value has a type that is declared in a module published on the Aptos blockchain. - -## Access control with signers - -The sender of a transaction is represented by a signer. When a function in a Move module takes `signer` as an argument, the Aptos Move VM translates the identity of the account that signed the transaction into a signer in a Move module entry point. See the below Move example code with `signer` in the `initialize` and `withdraw` functions. 
When a `signer` is not specified in a function, for example, the below `deposit` function, then no signer-based access controls will be provided for this function: - -```rust -module Test::Coin { - struct Coin has key { amount: u64 } - - public fun initialize(account: &signer) { - move_to(account, Coin { amount: 1000 }); - } - - public fun withdraw(account: &signer, amount: u64): Coin acquires Coin { - let balance = &mut borrow_global_mut(Signer::address_of(account)).amount; - *balance = *balance - amount; - Coin { amount } - } - - public fun deposit(account: address, coin: Coin) acquires Coin { - let balance = &mut borrow_global_mut(account).amount; - *balance = *balance + coin.amount; - Coin { amount: _ } = coin; - } -} -``` diff --git a/developer-docs-site/docs/concepts/base-gas.md b/developer-docs-site/docs/concepts/base-gas.md deleted file mode 100644 index e1aa94452255d..0000000000000 --- a/developer-docs-site/docs/concepts/base-gas.md +++ /dev/null @@ -1,319 +0,0 @@ ---- -title: "Computing Transaction Gas" -id: "base-gas" ---- - -# Computing Transaction Gas - -Aptos transactions by default charge a base gas fee, regardless of market conditions. -For each transaction, this "base gas" amount is based on three conditions: - -1. Instructions. -2. Storage. -3. Payload. - -The more function calls, branching conditional statements, etc. that a transaction requires, the more instruction gas it will cost. -Likewise, the more reads from and writes into global storage that a transaction requires, the more storage gas it will cost. -Finally, the more bytes in a transaction payload, the more it will cost. - -As explained in the [optimization principles](#optimization-principles) section, storage gas has by far the largest effect on base gas. For background on the Aptos gas model, see [The Making of the Aptos Gas Schedule](https://aptoslabs.medium.com/the-making-of-the-aptos-gas-schedule-508d5686a350). 
- - -## Instruction gas - -Basic instruction gas parameters are defined at [`instr.rs`] and include the following instruction types: - -### No-operation - -| Parameter | Meaning | -|-----------|----------------| -| `nop` | A no-operation | - -### Control flow - -| Parameter | Meaning | -|------------|----------------------------------| -| `ret` | Return | -| `abort` | Abort | -| `br_true` | Execute conditional true branch | -| `br_false` | Execute conditional false branch | -| `branch` | Branch | - -### Stack - -| Parameter | Meaning | -|---------------------|----------------------------------| -| `pop` | Pop from stack | -| `ld_u8` | Load a `u8` | -| `ld_u16` | Load a `u16` | -| `ld_u32` | Load a `u32` | -| `ld_u64` | Load a `u64` | -| `ld_u128` | Load a `u128` | -| `ld_256` | Load a `u256` | -| `ld_true` | Load a `true` | -| `ld_false` | Load a `false` | -| `ld_const_base` | Base cost to load a constant | -| `ld_const_per_byte` | Per-byte cost to load a constant | - -### Local scope - -| Parameter | Meaning | -|-----------------------------|--------------------------| -| `imm_borrow_loc` | Immutably borrow | -| `mut_borrow_loc` | Mutably borrow | -| `imm_borrow_field` | Immutably borrow a field | -| `mut_borrow_field` | Mutably borrow a field | -| `imm_borrow_field_generic` | | -| `mut_borrow_field_generic` | | -| `copy_loc_base` | Base cost to copy | -| `copy_loc_per_abs_val_unit` | | -| `move_loc_base` | Move | -| `st_loc_base` | | - -### Calling - -| Parameter | Meaning | -|---------------------------|---------------------------------| -| `call_base` | Base cost for a function call | -| `call_per_arg` | Cost per function argument | -| `call_per_local` | Cost per local argument | -| `call_generic_base` | | -| `call_generic_per_ty_arg` | Cost per type argument | -| `call_generic_per_arg` | | -| `call_generic_per_local` | Cost generic per local argument | - -### Structs - -| Parameter | Meaning | 
-|----------------------------|--------------------------------------| -| `pack_base` | Base cost to pack a `struct` | -| `pack_per_field` | Cost to pack a `struct`, per field | -| `pack_generic_base` | | -| `pack_generic_per_field` | | -| `unpack_base` | Base cost to unpack a `struct` | -| `unpack_per_field` | Cost to unpack a `struct`, per field | -| `unpack_generic_base` | | -| `unpack_generic_per_field` | | - -### References - -| Parameter | Meaning | -|-----------------------------|------------------------------------| -| `read_ref_base` | Base cost to read from a reference | -| `read_ref_per_abs_val_unit` | | -| `write_ref_base` | Base cost to write to a reference | -| `freeze_ref` | Freeze a reference | - -### Casting - -| Parameter | Meaning | -|-------------|------------------| -| `cast_u8` | Cast to a `u8` | -| `cast_u16` | Cast to a `u16` | -| `cast_u32` | Cast to a `u32` | -| `cast_u64` | Cast to a `u64` | -| `cast_u128` | Cast to a `u128` | -| `cast_u256` | Cast to a `u256` | - -### Arithmetic - -| Parameter | Meaning | -|-----------|----------| -| `add` | Add | -| `sub` | Subtract | -| `mul` | Multiply | -| `mod_` | Modulo | -| `div` | Divide | - - -### Bitwise - -| Parameter | Meaning | -|-----------|---------------------------| -| `bit_or` | `OR`: | | -| `bit_and` | `AND`: `&` | -| `xor` | `XOR`: `^` | -| `shl` | Shift left: `<<` | -| `shr` | Shift right: `>>` | - -### Boolean - -| Parameter | Meaning | -|-----------|---------------------------------| -| `or` | `OR`: || | -| `and` | `AND`: `&&` | -| `not` | `NOT`: `!` | - - -### Comparison - -| Parameter | Meaning | -|------------------------|--------------------------------| -| `lt` | Less than: `<` | -| `gt` | Greater than: `>` | -| `le` | Less than or equal to: `<=` | -| `ge` | Greater than or equal to: `>=` | -| `eq_base` | Base equality cost: `==` | -| `eq_per_abs_val_unit` | | -| `neq_base` | Base not equal cost: `!=` | -| `neq_per_abs_val_unit` | | - -### Global storage - -| Parameter | 
Meaning | -|----------------------------------|-------------------------------------------------------| -| `imm_borrow_global_base` | Base cost to immutably borrow: `borrow_global()` | -| `imm_borrow_global_generic_base` | | -| `mut_borrow_global_base` | Base cost to mutably borrow: `borrow_global_mut()` | -| `mut_borrow_global_generic_base` | | -| `exists_base` | Base cost to check existence: `exists()` | -| `exists_generic_base` | | -| `move_from_base` | Base cost to move from: `move_from()` | -| `move_from_generic_base` | | -| `move_to_base` | Base cost to move to: `move_to()` | -| `move_to_generic_base` | | - -### Vectors - -| Parameter | Meaning | -|--------------------------------|------------------------------------------| -| `vec_len_base` | Length of a vector | -| `vec_imm_borrow_base` | Immutably borrow an element | -| `vec_mut_borrow_base` | Mutably borrow an element | -| `vec_push_back_base` | Push back | -| `vec_pop_back_base` | Pop from the back | -| `vec_swap_base` | Swap elements | -| `vec_pack_base` | Base cost to pack a vector | -| `vec_pack_per_elem` | Cost to pack a vector per element | -| `vec_unpack_base` | Base cost to unpack a vector | -| `vec_unpack_per_expected_elem` | Base cost to unpack a vector per element | - -Additional storage gas parameters are defined in [`table.rs`], [`move_stdlib.rs`], and other assorted source files in [`aptos-gas-schedule/src/`]. 
- -## IO and Storage charges - -The following gas parameters are applied (i.e., charged) to represent the costs associated with transient storage device resources, including disk IOPS and bandwidth: - -| Parameter | Meaning | -|---------------------------------|--------------------------------------------------------------------| -| storage_io_per_state_slot_write | charged per state write operation in the transaction output | -| storage_io_per_state_byte_write | charged per byte in all state write ops in the transaction output | -| storage_io_per_state_slot_read | charged per item loaded from global state | -| storage_io_per_state_byte_read | charged per byte loaded from global state | - -The following storage fee parameters are applied (i.e., charged in absolute APT values) to represent the disk space and structural costs associated with using the [Aptos authenticated data structure](../reference/glossary.md#merkle-trees) for storing items on the blockchain. This encompasses actions such as creating things in the global state, emitting events, and similar operations: - -| Parameter | Meaning | -|-----------------------------------|----------------------------------------------------------------------------------------| -| free_write_bytes_quota | 1KB (configurable) free bytes per state slot. (*Subject to short-term change.*) | -| free_event_bytes_quota | 1KB (configurable) free event bytes per transaction. (*Subject to short-term change.*) | -| storage_fee_per_state_slot_create | allocating a state slot, by `move_to()`, `table::add()`, etc | -| storage_fee_per_excess_state_byte | per byte beyond `free_write_bytes_quota` per state slot. Notice this is charged every time the slot is written to, not only at allocation time. | -| storage_fee_per_event_byte | per byte beyond `free_event_bytes_quota` per transaction. | -| storage_fee_per_transaction_byte | each transaction byte beyond `large_transaction_cutoff`. 
(search in the page) | - -### Vectors - -Byte-wise fees are similarly assessed on vectors, which consume $\sum_{i = 0}^{n - 1} e_i + b(n)$ bytes, where: - -* $n$ is the number of elements in the vector -* $e_i$ is the size of element $i$ -* $b(n)$ is a "base size" which is a function of $n$ - -See the [BCS sequence specification] for more information on vector base size (technically a `ULEB128`), which typically occupies just one byte in practice, such that a vector of 100 `u8` elements accounts for $100 + 1 = 101$ bytes. -Hence per the item-wise read methodology described above, reading the last element of such a vector is treated as a 101-byte read. - -## Payload gas - -Payload gas is defined in [`transaction.rs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs), which incorporates storage gas with several payload- and pricing-associated parameters: - -| Parameter | Meaning | -|---------------------------------|----------------------------------------------------------------------------------------| -| `min_transaction_gas_units` | Minimum internal gas units for a transaction, charged at the start of execution | -| `large_transaction_cutoff` | Size, in bytes, above which transactions will be charged an additional amount per byte | -| `intrinsic_gas_per_byte` | Internal gas units charged per byte for payloads above `large_transaction_cutoff` | -| `maximum_number_of_gas_units` | Upper limit on external gas units for a transaction | -| `min_price_per_gas_unit` | Minimum gas price allowed for a transaction | -| `max_price_per_gas_unit` | Maximum gas price allowed for a transaction | -| `max_transaction_size_in_bytes` | Maximum transaction payload size in bytes | -| `gas_unit_scaling_factor` | Conversion factor between internal gas units and external gas units | - -Here, "internal gas units" are defined as constants in source files like [`instr.rs`] and [`storage_gas.move`], which are more granular than 
"external gas units" by a factor of `gas_unit_scaling_factor`: -to convert from internal gas units to external gas units, divide by `gas_unit_scaling_factor`. -Then, to convert from external gas units to octas, multiply by the "gas price", which denotes the number of octas per unit of external gas. - -## Optimization principles - -### Unit and pricing constants - -As of the time of this writing, `min_price_per_gas_unit` in [`transaction.rs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs) is defined as [`aptos_global_constants`]`::GAS_UNIT_PRICE` (which is itself defined as 100), with other noteworthy [`transaction.rs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs) constants as follows: - -| Constant | Value | -|---------------------------|--------| -| `min_price_per_gas_unit` | 100 | -| `max_price_per_gas_unit` | 10,000,000,000 | -| `gas_unit_scaling_factor` | 1,000,000 | - -See [Payload gas](#payload-gas) for the meaning of these constants. - -### Storage Fee - -When the network load is low, the gas unit price is expected to be low, making most aspects of the transaction cost more affordable. However, the storage fee is an exception, as it's priced in terms of absolute APT value. In most instances, the transaction fee is the predominant component of the overall transaction cost. This is especially true when a transaction allocates state slots, writes to sizable state items, emits numerous or large events, or when the transaction itself is a large one. All of these factors consume disk space on Aptos nodes and are charged accordingly. - -On the other hand, the storage refund incentivizes releasing state slots by deleting state items. The state slot fee is fully refunded upon slot deallocation, while the excess state byte fee is non-refundable. 
This will soon change by differentiating between permanent bytes (those in the global state) and relative ephemeral bytes (those that traverse the ledger history). - -Some cost optimization strategies concerning the storage fee: - -1. Minimize state item creation. -2. Minimize event emissions. -3. Avoid large state items, events, and transactions. -4. Clean up state items that are no longer in use. -5. If two fields are consistently updated together, group them into the same resource or resource group. -6. If a struct is large and only a few fields are updated frequently, move those fields to a separate resource or resource group. - - -### Instruction gas - -As of the time of this writing, all instruction gas operations are multiplied by the `EXECUTION_GAS_MULTIPLIER` defined in [`gas_meter.rs`], which is set to 20. -Hence the following representative operations assume gas costs as follows (divide internal gas by scaling factor, then multiply by minimum gas price): - -| Operation | Minimum octas | -|------------------------------|---------------| -| Table add/borrow/remove box | 240 | -| Function call | 200 | -| Load constant | 130 | -| Globally borrow | 100 | -| Read/write reference | 40 | -| Load `u128` on stack | 16 | -| Table box operation per byte | 2 | - -(Note that per-byte table box operation instruction gas does not account for storage gas, which is assessed separately). - -For comparison, reading a 100-byte item costs $r_i + 100 * r_b = 3000 + 100 * 3 = 3300$ octas at minimum, some 16.5 times as much as a function call, and in general, instruction gas costs are largely dominated by storage gas costs. - -Notably, however, there is still technically an incentive to reduce the number of function calls in a program, but engineering efforts are more effectively dedicated to writing modular, decomposed code that is geared toward reducing storage gas costs, rather than attempting to write repetitive code blocks with fewer nested functions (in nearly all cases). 
- -In extreme cases it is possible for instruction gas to far outweigh storage gas, for example if a loopwise mathematical function takes 10,000 iterations to converge; but again this is an extreme case and for most applications storage gas has a larger impact on base gas than does instruction gas. - -### Payload gas - -As of the time of this writing, [`transaction/mod.rs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs) defines the minimum amount of internal gas per transaction as 1,500,000 internal units (15,000 octas at minimum), an amount that increases by 2,000 internal gas units (20 octas minimum) per byte for payloads larger than 600 bytes, with the maximum number of bytes permitted in a transaction set at 65536. -Hence in practice, payload gas is unlikely to be a concern. - - - -[#4540]: https://github.com/aptos-labs/aptos-core/pull/4540/files -[`aptos-gas-schedule/src/`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src -[`aptos_global_constants`]: https://github.com/aptos-labs/aptos-core/blob/main/config/global-constants/src/lib.rs -[`base_8192_exponential_curve()`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/storage_gas.md#0x1_storage_gas_base_8192_exponential_curve -[BCS sequence specification]: https://github.com/diem/bcs#fixed-and-variable-length-sequences -[`gas_meter.rs`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas/src/gas_meter.rs -[`initialize()`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/storage_gas.md#0x1_storage_gas_initialize -[`instr.rs`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/instr.rs -[`move_stdlib.rs`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/move_stdlib.rs -[`on_reconfig()`]: 
https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/storage_gas.md#@Specification_16_on_reconfig -[`storage_gas.md`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/storage_gas.md -[`storage_gas.move`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/storage_gas.move -[`StorageGas`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/storage_gas.md#resource-storagegas -[`table.rs`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/table.rs -[`transaction.rs`]: https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs diff --git a/developer-docs-site/docs/concepts/blockchain.md b/developer-docs-site/docs/concepts/blockchain.md deleted file mode 100755 index 85286b12e1873..0000000000000 --- a/developer-docs-site/docs/concepts/blockchain.md +++ /dev/null @@ -1,351 +0,0 @@ ---- -title: "Aptos Blockchain Deep Dive" -slug: "blockchain" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Blockchain Deep Dive - -For a deeper understanding of the lifecycle of an Aptos transaction (from an operational perspective), we will follow a transaction on its journey, from being submitted to an Aptos fullnode, to being committed to the Aptos blockchain. We will then focus on the logical components of Aptos nodes and take a look how the transaction interacts with these components. - -## Life of a Transaction - -* Alice and Bob are two users who each have an [account](../reference/glossary.md#account) on the Aptos blockchain. -* Alice's account has 110 Aptos Coins. -* Alice is sending 10 Aptos Coins to Bob. 
-* The current [sequence number](../reference/glossary.md#sequence-number) of Alice's account is 5 (which indicates that 5 transactions have already been sent from Alice's account). -* There are a total of 100 validator nodes — V1 to V100 on the network. -* An Aptos client submits Alice's transaction to a REST service on an Aptos Fullnode. The fullnode forwards this transaction to a validator fullnode which in turn forwards it to validator V1. -* Validator V1 is a proposer/leader for the current round. - -### The Journey - -In this section, we will describe the lifecycle of transaction T5, from when the client submits it to when it is committed to the Aptos blockchain. - -For the relevant steps, we've included a link to the corresponding inter-component interactions of the validator node. After you are familiar with all the steps in the lifecycle of the transaction, you may want to refer to the information on the corresponding inter-component interactions for each step. - -
- -
- -:::tip Alert -The arrows in all the visuals in this article originate on the component initiating an interaction/action and terminate on the component on which the action is being performed. The arrows do not represent data read, written, or returned. -::: - -The lifecycle of a transaction has five stages: - -* **Accepting**: [Accepting the transaction](#accepting-the-transaction) -* **Sharing**: [Sharing the transaction with other validator nodes](#sharing-the-transaction-with-other-validator-nodes) -* **Proposing**: [Proposing the block](#proposing-the-block) -* **Executing and Consensus**: [Executing the block and reaching consensus](#executing-the-block-and-reaching-consensus) -* **Committing**: [Committing the block](#committing-the-block) - -We've described what happens in each stage below, along with links to the corresponding Aptos node component interactions. - -:::warning - -Transactions are validated upon entering a mempool and prior to execution by consensus. The client only learns of validation results returned during the initial submission via the REST service. Transactions may silently fail to execute, especially in the case where the account has run out of utility token or changed its authentication key in the midst of many transactions. While this happens infrequently, there are ongoing efforts to improve the visibility in this space. - -::: - -### Client submits a transaction - -An Aptos **client constructs a raw transaction** (let's call it Traw5) to transfer 10 Aptos Coins from Alice’s account to Bob’s account. The Aptos client signs the transaction with Alice's private key. The signed transaction T5 includes the following: - -* The raw transaction. -* Alice's public key. -* Alice's signature. 
-
-The raw transaction includes the following fields:
-
-| Fields | Description |
-| ------ | ----------- |
-| [Account address](../reference/glossary.md#account-address) | Alice's account address |
-| Payload | Indicates an action or set of actions on Alice's behalf. In the case this is a Move function, it directly calls into Move bytecode on the chain. Alternatively, it may be a Move bytecode peer-to-peer [transaction script](../reference/glossary.md#transaction-script). It also contains a list of inputs to the function or script. For this example, it is a function call to transfer an amount of Aptos Coins from Alice's account to Bob's account, where Alice's account is implied by sending the transaction and Bob's account and the amount are specified as transaction inputs. |
-| [Gas unit price](../reference/glossary.md#gas-unit-price) | The amount the sender is willing to pay per unit of gas, to execute the transaction. This is represented as Octa or units of 10-8 Aptos utility tokens.
-| [Maximum gas amount](../reference/glossary.md#maximum-gas-amount) | The maximum gas amount in Aptos utility tokens Alice is willing to pay for this transaction. Gas charges are equal to the base gas cost covered by computation and IO multiplied by the gas price. Gas costs also include storage with an Apt-fixed priced storage model. This is represented as Octa or units of 10-8 Aptos utility tokens.
-| [Expiration time](../reference/glossary.md#expiration-time) | Expiration time of the transaction. |
-| [Sequence number](../reference/glossary.md#sequence-number) | The sequence number (5, in this example) for an account indicates the number of transactions that have been submitted and committed on-chain from that account. In this case, 5 transactions have been submitted from Alice’s account, including Traw5. Note: a transaction with sequence number 5 can only be committed on-chain if the account sequence number is 5.
| -| [Chain ID](https://github.com/aptos-labs/aptos-core/blob/main/types/src/chain_id.rs) | An identifier that distinguishes the Aptos networks (to prevent cross-network attacks). | - -### Accepting the transaction - -| Description | Aptos Node Component Interactions | -| ------------------------------------------------------------ | ---------------------------------------------------------- | -| 1. **Client → REST service**: The client submits transaction T5 to the REST service of an Aptos fullnode. The fullnode uses the REST service to forward the transaction to its own mempool, which then forwards the transaction to mempools running on other nodes in the network. The transaction will eventually be forwarded to a mempool running on a validator fullnode, which will send it to a validator node (V1 in this case). | [1. REST Service](#1-client--rest-service) | -| 2. **REST service → Mempool**: The fullnode's mempool transmits transaction T5 to validator V1's mempool. | [2. REST Service](#2-rest-service--mempool), [1. Mempool](#1-rest-service--mempool) | -| 3. **Mempool → Virtual Machine (VM)**: Mempool will use the virtual machine (VM) component to perform transaction validation, such as signature verification, account balance verification and replay resistance using the sequence number. | [4. Mempool](#4-mempool--vm), [3. Virtual Machine](#3-mempool--virtual-machine) | - - -### Sharing the transaction with other validator nodes - -| Description | Aptos Node Component Interactions | -| ------------------------------------------------------------ | -------------------------------- | -| 4. **Mempool**: The mempool will hold T5 in an in-memory buffer. Mempool may already contain multiple transactions sent from Alice's address. | [Mempool](#mempool) | -| 5. 
**Mempool → Other Validators**: Using the shared-mempool protocol, V1 will share the transactions (including T5) in its mempool with other validator nodes and place transactions received from them into its own (V1) mempool. | [2. Mempool](#2-mempool--other-validator-nodes) | - -### Proposing the block - -| Description | Aptos Node Component Interactions | -| ------------------------------------------------------------ | ---------------------------------------- | -| 6. **Consensus → Mempool**: — As validator V1 is a proposer/leader for this transaction, it will pull a block of transactions from its mempool and replicate this block as a proposal to other validator nodes via its consensus component. | [1. Consensus](#1-consensus--mempool), [3. Mempool](#3-consensus--mempool) | -| 7. **Consensus → Other Validators**: The consensus component of V1 is responsible for coordinating agreement among all validators on the order of transactions in the proposed block. | [2. Consensus](#2-consensus--other-validators) | - -### Executing the block and reaching consensus - -| Description | Aptos Node Component Interactions | -| ------------------------------------------------------------ | ------------------------------------------------ | -| 8. **Consensus → Execution**: As part of reaching agreement, the block of transactions (containing T5) is shared with the execution component. | [3. Consensus](#3-consensus--execution-consensus--other-validators), [1. Execution](#1-consensus--execution) | -| 9. **Execution → Virtual Machine**: The execution component manages the execution of transactions in the VM. Note that this execution happens speculatively before the transactions in the block have been agreed upon. | [2. Execution](#2-execution--vm), [3. Virtual Machine](#3-mempool--virtual-machine) | -| 10. 
**Consensus → Execution**: After executing the transactions in the block, the execution component appends the transactions in the block (including T5) to the [Merkle accumulator](../reference/glossary.md#merkle-accumulator) (of the ledger history). This is an in-memory/temporary version of the Merkle accumulator. The necessary part of the proposed/speculative result of executing these transactions is returned to the consensus component to agree on. The arrow from "consensus" to "execution" indicates that the request to execute transactions was made by the consensus component. | [3. Consensus](#3-consensus--execution-consensus--other-validators), [1. Execution](#1-consensus--execution) | -| 11. **Consensus → Other Validators**: V1 (the consensus leader) attempts to reach consensus on the proposed block's execution result with the other validator nodes participating in consensus. | [3. Consensus](#3-consensus--execution-consensus--other-validators) | - -### Committing the block - -| Description | Aptos Node Component Interactions | -| ------------------------------------------------------------ | ------------------------------------------------------------ | -| 12. **Consensus → Execution**, **Execution → Storage**: If the proposed block's execution result is agreed upon and signed by a set of validators that have the quorum of votes, validator V1's execution component reads the full result of the proposed block execution from the speculative execution cache and commits all the transactions in the proposed block to persistent storage with their results. | [4. Consensus](#4-consensus--execution), [3. Execution](#3-consensus--execution), [4. Execution](#4-execution--storage), [3. Storage](#3-execution--storage) | - -Alice's account will now have 100 Aptos Coins, and its sequence number will be 6. If T5 is replayed by Bob, it will be rejected as the sequence number of Alice's account (6) is greater than the sequence number of the replayed transaction (5). 
- -## Aptos node component interactions - -In the [Life of a Transaction](#life-of-a-transaction) section, we described the typical lifecycle of a transaction (from transaction submission to transaction commit). Now let's look at the inter-component interactions of Aptos nodes as the blockchain processes transactions and responds to queries. This information will be most useful to those who: - -* Would like to get an idea of how the system works under the covers. -* Are interested in eventually contributing to the Aptos blockchain. - -You can learn more about the different types of Aptos nodes here: -* [Validator nodes](../concepts/validator-nodes.md) -* [Fullnodes](../concepts/fullnodes.md) - -For our narrative, we will assume that a client submits a transaction TN to a validator VX. For each validator component, we will describe each of its inter-component interactions in subsections under the respective component's section. Note that subsections describing the inter-component interactions are not listed strictly in the order in which they are performed. Most of the interactions are relevant to the processing of a transaction, and some are relevant to clients querying the blockchain (queries for existing information on the blockchain). - -The following are the core components of an Aptos node used in the lifecycle of a transaction: - -**Fullnode** - -* [REST Service](#rest-service) - -**Validator node** - -* [Mempool](#mempool) -* [Consensus](#consensus) -* [Execution](#execution) -* [Virtual Machine](#virtual-machine-vm) -* [Storage](#storage) - -## REST Service - -
- -
- -Any request made by a client goes to the REST Service of a fullnode first. Then, the submitted transaction is forwarded to the validator fullnode, which then sends it to the validator node VX. - -### 1. Client → REST Service - -A client submits a transaction to the REST service of an Aptos fullnode. - -### 2. REST Service → Mempool - -The REST service of the fullnode transfers the transaction to its mempool. After mempool does some initial checks, the REST Service will return a status to the client indicating whether the transaction was accepted or rejected. For example, out-of-date transactions will be rejected: mempool will accept the transaction TN only if the sequence number of TN is greater than or equal to the current sequence number of the sender's account. - -### 3. Mempool -> Mempool - -The mempool on the fullnode sends the transaction to the mempool of a validator fullnode, which then sends the transaction to validator node VX's mempool. Note that the transaction will not be sent to the next mempool (or passed to consensus) until the sequence number matches the sequence number of the sender’s account. Furthermore, each mempool performs the same initial checks upon receiving a transaction, this may result in a transaction being discarded on its way to consensus. The current implementation of mempool does not provide any feedback if a transaction is discarded during this process. - -### 4. REST Service → Storage - -When a client performs a read query on the Aptos blockchain (for example, to get the balance of Alice's account), the REST service interacts with the storage component directly to obtain the requested information. - -## Virtual Machine (VM) - -
- -
- -The Move VM verifies and executes transaction scripts written in Move bytecode. - -### 1. Virtual Machine → Storage - -When mempool requests the VM to validate a transaction via `VMValidator::validate_transaction()`, the VM loads the transaction sender's account from storage and performs verifications, some of which have been described in the list below. - -* Checks that the input signature on the signed transaction is correct (to reject incorrectly signed transactions). -* Checks that the sender's account authentication key is the same as the hash of the public key (corresponding to the private key used to sign the transaction). -* Verifies that the sequence number for the transaction is greater than or equal to the current sequence number for the sender's account. Completing this check prevents the replay of the same transaction against the sender's account. -* Verifies that the program in the signed transaction is not malformed, as a malformed program cannot be executed by the VM. -* Verifies that the sender's account balance contains at least the maximum gas amount multiplied by the gas price specified in the transaction, which ensures that the transaction can pay for the resources it uses. - -### 2. Execution → Virtual Machine - -The execution component utilizes the VM to execute a transaction via `ExecutorTask::execute_transaction()`. - -It is important to understand that executing a transaction is different from updating the state of the ledger and persisting the results in storage. A transaction TN is first executed as part of an attempt to reach agreement on blocks during consensus. If agreement is reached with the other validators on the ordering of transactions and their execution results, the results are persisted in storage and the state of the ledger is updated. - -### 3. 
Mempool → Virtual Machine - -When mempool receives a transaction from other validators via shared mempool or from the REST service, mempool invokes `VMValidator::validate_transaction()` on the VM to validate the transaction. - -For implementation details refer to the [Move Virtual Machine README](https://github.com/move-language/move/tree/main/language/move-vm). - -## Mempool - -
- -
- -Mempool is a shared buffer that holds the transactions that are “waiting” to be executed. When a new transaction is added to the mempool, the mempool shares this transaction with other validator nodes in the system. To reduce network consumption in the “shared mempool,” each validator is responsible for delivering its own transactions to other validators. When a validator receives a transaction from the mempool of another validator, the transaction is added to the mempool of the recipient validator. - -### 1. REST Service → Mempool - -* After receiving a transaction from the client, the REST service sends the transaction to its own mempool, which then shares the transaction with the mempool of a validator fullnode. The mempool on the validator fullnode then shares the transaction with the mempool of a validator. -* The mempool for validator node VX accepts transaction TN for the sender's account only if the sequence number of TN is greater than or equal to the current sequence number of the sender's account. - -### 2. Mempool → Other validator nodes - -* The mempool of validator node VX shares transaction TN with the other validators on the same network. -* Other validators share the transactions in their respective mempools with VX’s mempool. - -### 3. Consensus → Mempool - -* When the transaction is forwarded to a validator node and once the validator node becomes the leader, its consensus component will pull a block of transactions from its mempool and replicate the proposed block to other validators. It does this to arrive at a consensus on the ordering of transactions and the execution results of the transactions in the proposed block. -* Note that just because a transaction TN was included in a proposed consensus block, it does not guarantee that TN will eventually be persisted in the distributed database of the Aptos blockchain. - - -### 4. 
Mempool → VM - -When mempool receives a transaction from other validators, mempool invokes VMValidator::validate_transaction() on the VM to validate the transaction. - -## Consensus - -
- -
- -The consensus component is responsible for ordering blocks of transactions and agreeing on the results of execution by participating in the [consensus protocol](../reference/glossary.md#consensus-protocol) with other validators in the network. - - -### 1. Consensus → Mempool - -When validator VX is a leader/proposer, the consensus component of VX pulls a block of transactions from its mempool via: `Mempool::get_batch()`, and forms a proposed block of transactions. - -### 2. Consensus → Other Validators - -If VX is a proposer/leader, its consensus component replicates the proposed block of transactions to other validators. - -### 3. Consensus → Execution, Consensus → Other Validators - -* To execute a block of transactions, consensus interacts with the execution component. Consensus executes a block of transactions via `BlockExecutorTrait::execute_block()` (Refer to [Consensus → execution](#1-consensus--execution)) -* After executing the transactions in the proposed block, the execution component responds to the consensus component with the result of executing these transactions. -* The consensus component signs the execution result and attempts to reach agreement on this result with other validators. - -### 4. Consensus → Execution - -If enough validators vote for the same execution result, the consensus component of VX informs execution via `BlockExecutorTrait::commit_blocks()` that this block is ready to be committed. - -## Execution - -
- -
- -The execution component coordinates the execution of a block of transactions and maintains a transient state that can be voted upon by consensus. If these transactions are successful, they are committed to storage. - -### 1. Consensus → Execution - -* Consensus requests execution to execute a block of transactions via: `BlockExecutorTrait::execute_block()`. -* Execution maintains a “scratchpad,” which holds in-memory copies of the relevant portions of the [Merkle accumulator](../reference/glossary.md#merkle-accumulator). This information is used to calculate the root hash of the current state of the Aptos blockchain. -* The root hash of the current state is combined with the information about the transactions in the proposed block to determine the new root hash of the accumulator. This is done prior to persisting any data, and to ensure that no state or transaction is stored until agreement is reached by a quorum of validators. -* Execution computes the speculative root hash and then the consensus component of VX signs this root hash and attempts to reach agreement on this root hash with other validators. - -### 2. Execution → VM - -When consensus requests execution to execute a block of transactions via `BlockExecutorTrait::execute_block()`, execution uses the VM to determine the results of executing the block of transactions. - -### 3. Consensus → Execution - -If a quorum of validators agrees on the block execution results, the consensus component of each validator informs its execution component via `BlockExecutorTrait::commit_blocks()` that this block is ready to be committed. This call to the execution component will include the signatures of the validators to provide proof of their agreement. - -### 4. Execution → Storage - -Execution takes the values from its “scratchpad” and sends them to storage for persistence via `DbWriter::save_transactions()`. 
Execution then prunes the old values from the “scratchpad” that are no longer needed (for example, parallel blocks that cannot be committed). - -For implementation details refer to the [Execution README](https://github.com/aptos-labs/aptos-core/tree/main/execution). - -## Storage - -
- -
- -The storage component persists agreed upon blocks of transactions and their execution results to the Aptos blockchain. A block of transactions (which includes transaction TN) will be saved via storage when there is agreement between more than a quorum (2f+1) of the validators participating in consensus. Agreement must include all of the following: -* The transactions to include in the block -* The order of the transactions -* The execution results of the transactions in the block - -Refer to [Merkle accumulator](../reference/glossary.md#merkle-accumulator) for information on how a transaction is appended to the data structure representing the Aptos blockchain. - -### 1. VM → Storage - -When mempool invokes `VMValidator::validate_transaction()` to validate a transaction, `VMValidator::validate_transaction()` loads the sender's account from storage and performs read-only validity checks on the transaction. - -### 2. Execution → Storage - -When the consensus component calls `BlockExecutorTrait::execute_block()`, execution reads the current state from storage combined with the in-memory “scratchpad” data to determine the execution results. - -### 3. Execution → Storage - -Once consensus is reached on a block of transactions, execution calls storage via `DbWriter::save_transactions()` to save the block of transactions and permanently record them. This will also store the signatures from the validator nodes that agreed on this block of transactions. The in-memory data in “scratchpad” for this block is passed to update storage and persist the transactions. When the storage is updated, every account that was modified by these transactions will have its sequence number incremented by one. - -Note: The sequence number of an account on the Aptos blockchain increments by one for each committed transaction originating from that account. - -### 4. 
REST Service → Storage - -For client queries that read information from the blockchain, the REST service directly interacts with storage to read the requested information. - -For implementation details refer to the [Storage README](https://github.com/aptos-labs/aptos-core/tree/main/storage). diff --git a/developer-docs-site/docs/concepts/blocks.md b/developer-docs-site/docs/concepts/blocks.md deleted file mode 100644 index 25c8cf5d59170..0000000000000 --- a/developer-docs-site/docs/concepts/blocks.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -title: "Blocks" -id: "blocks" ---- - -# Blocks - -Aptos is a per-transaction versioned database. When transactions are executed, the resulting state of each transaction is stored separately and thus allows for more granular data access. This is different from other blockchains where only the resulting state of a block (a group of transactions) is stored. - -Blocks are still a fundamental unit within Aptos. Transactions are batched and executed together in a block. In addition, the [proofs](./txns-states.md#proofs) within storage are at the block-level granularity. The number of transactions within a block varies depending on network activity and a configurable maximum block size limit. As the blockchain becomes busier, blocks will likely contain more transactions. - -## System transactions - -Each Aptos block contains both user transactions and special system transactions to *mark* the beginning and end of the transaction batch. Specifically, there are two system transactions: -1. `BlockMetadataTransaction` - is inserted at the beginning of the block. A `BlockMetadata` transaction can also mark the end of an [epoch](#epoch) and trigger reward distribution to validators. -2. `StateCheckpointTransaction` - is appended at the end of the block and is used as a checkpoint milestone. - -## Epochs - -In Aptos, epochs represent a longer period of time in order to safely synchronize major changes such as validator set additions/removals. 
An epoch is a fixed duration of time, currently defined as two hours on mainnet. The number of blocks in an epoch depends on how many blocks can execute within this period of time. It is only at the start of a new epoch that major changes such as a validator joining the validator set don't immediately take effect among the validators. diff --git a/developer-docs-site/docs/concepts/delegated-staking.md b/developer-docs-site/docs/concepts/delegated-staking.md deleted file mode 100644 index 7fb7d6dcc9d44..0000000000000 --- a/developer-docs-site/docs/concepts/delegated-staking.md +++ /dev/null @@ -1,136 +0,0 @@ ---- -title: "Delegated Staking" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Delegated Staking - -## Delegated Staking on the Aptos Blockchain - -:::tip We strongly recommend that you read about [Staking](../concepts/staking.md) first. -::: - -Delegated staking is an extension of the staking protocol. A delegation pool abstracts the stake owner to an entity capable of collecting stake from delegators and adding it on their behalf to the native stake pool attached to the validator. This allows multiple entities to form a stake pool that achieves the minimum requirements for the validator to join the validator set. While delegators can add stake to an inactive pool, the delegation pool will not earn rewards until it is active. - -:::danger Delegation pools are permissionless and anyone can add stake. Delegation pools cannot be changed to stake pools once it's created or vice versa, though it can be removed from the validator set and assets withdrawn. For full details of the stake pool, see [Staking](../concepts/staking.md) -::: - -For the full delegation pool smart contract, see [delegation_pool.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/delegation_pool.move) - -Unlike a stake pool, a delegation pool can be initialized with zero stake. 
When initialized, the delegated stake pool is owned indirectly via a resource account. This account will manage the stake of the underlying stake pool on behalf of the delegators by forwarding their stake-management operations to it (add, unlock, reactivate, withdraw) while the resource account cannot be directly accessed nor externally owned. - -See full list of [Delegation Pool Operations](../nodes/validator-node/operator/delegation-pool-operations.md) - -![image](https://user-images.githubusercontent.com/120680608/234953723-ae6cc89e-76d8-4014-89f3-ec8799c7b281.png) - - -There are four entity types: - -- Owner -- Operator -- Voter -- Delegator - - -Using this model, the owner does not have to stake on the Aptos blockchain in order to run a validator. - - -[How Validation on the Aptos blockchain works](../concepts/staking.md#validation-on-the-aptos-blockchain) - - -### Owner - -The delegation pool owner has the following capabilities: - -1. Creates delegation pool -2. Assigns operator for the delegation pool -3. Sets operator commission percentage for the delegation pool -4. Assigns voter for the delegation pool - -### Operator - -A node operator is assigned by the pool owner to run the validator node. The operator has the following capabilities: - -1. Join or leave the validator set once the delegation pool reaches 1M APT -2. Perform validating functions -3. Change the consensus key and network addresses. The consensus key is used to participate in the validator consensus process, i.e., to vote and propose a block. The operator is allowed to change ("rotate") this key in case this key is compromised. - -The operator receives commission that is distributed automatically at the end of each epoch as rewards. - -### Voter - -An owner can designate a voter. This enables the voter to participate in governance. The voter will use the voter key to sign the governance votes in the transactions. - -:::tip Governance -This document describes staking. 
See [Governance](./governance.md) for how to participate in the Aptos on-chain governance using the owner-voter model. -::: - -### Delegator - -A delegator is anyone who has stake in the delegation pool. Delegators earn rewards on their stake minus any commissions for the operator. Delegators can perform the following delegator operations: - -1. Add stake -2. Unlock stake -3. Reactivate stake -4. Withdraw stake - -## Validator flow - -:::tip Delegation pool operations -See [Delegation pool operations](../nodes/validator-node/operator/delegation-pool-operations.md) for the correct sequence of commands to run for the below flow. -::: - -1. [Operator deploys validator node](../nodes/validator-node/operator/running-validator-node/index.md) -2. [Run command to get delegation pool address](../nodes/validator-node/operator/delegation-pool-operations.md#connect-to-aptos-network) -3. [Operator connects to the network using pool address derived in step 2](../nodes/validator-node/operator/connect-to-aptos-network.md) -4. [Owner initializes the delegation pool and sets operator](../nodes/validator-node/operator/delegation-pool-operations.md#initialize-a-delegation-pool) -5. Delegators can add stake at any time -6. When the delegation pool reaches 1M APT, the operator can call aptos node join-validator-set to join the active validator set. Changes will be effective in the next epoch. -7. Validator validates (proposes blocks as a leader-validator) and gains rewards. Rewards are distributed to delegators proportionally to stake amount. The stake will automatically be locked up for a fixed duration (set by governance) and automatically renewed at expiration. -8. At any point, if the operator wants to update the consensus key or validator network addresses, they can call aptos node update-consensus-key or aptos node update-validator-network-addresses. Similar to changes to stake, the changes to consensus key or validator network addresses are only effective in the next epoch. -9. 
Delegators can request to unlock their stake at any time. However, their stake will only become withdrawable when the delegation pool lockup expires. -10. Validator can either explicitly leave the validator set by calling aptos node leave-validator-set or if their stake drops below the min required, they would get removed at the end of the epoch. - - -## Joining the validator set - -Participating as a delegation validator node on the Aptos network works like this: - -1. Operator runs a validator node and configures the on-chain validator network addresses and rotates the consensus key. -2. Owner initializes the delegation pool. -3. The validator node cannot sync until the delegation pool becomes active. The delegation pool becomes active when it reaches 1M APT. -4. Operator validates and gains rewards. -5. The stake pool is automatically locked up for a fixed duration (set by the Aptos governance) and will be automatically renewed at expiration. Commissions are automatically distributed to the operator as rewards. The operator can unlock stake at any time, but cannot withdraw until the delegation pool’s lockup period expires. -6. Operator must wait until the new epoch starts before their validator becomes active. - -:::tip Joining the validator set -For step-by-step instructions on how to join the validator set, see: [Joining Validator Set](../nodes/validator-node/operator/staking-pool-operations.md#joining-validator-set). -::: - -### Automatic lockup duration - -When the operator joins the validator set, the delegation pool's stake will automatically be locked up for a fixed duration that is set by the Aptos governance. Delegators will follow the delegation pool's lockup cycle. - -### Automatic lockup renewal - -When the lockup period expires, it will be automatically renewed, so that the validator can continue to validate and receive the rewards. - -### Unlocking your stake - -Delegators can unlock stake at any time. 
However, the stake will only become withdrawable after the delegation pool's lockup period expires. Unlocked stake will continue earning rewards until the stake becomes withdrawable. - -### Resetting the lockup - -Lockup cannot be reset. - -## Rewards - -Rewards for delegated staking are calculated by using: - -1. The rewards_rate, an annual percentage yield (APY), i.e., rewards accrue as a compound interest on your current staked amount. -2. Delegator stake -3. [Validator rewards performance](../concepts/staking.md#rewards-formula) - -See [Computing delegation pool rewards](../nodes/validator-node/operator/delegation-pool-operations.md#compute-delegation-pool-rewards-earned) diff --git a/developer-docs-site/docs/concepts/events.md b/developer-docs-site/docs/concepts/events.md deleted file mode 100755 index 4e1e7a4aeac25..0000000000000 --- a/developer-docs-site/docs/concepts/events.md +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: "Events" -slug: "events" ---- - -Events are emitted during the execution of a transaction. Each Move module can define its own events and choose when to emit the events upon execution of the module. Aptos Move supports two form of events: module events and EventHandle events. Module events are the modern event mechanism and shipped in the framework release 1.7. EventHandle events are deprecated and shipped with the original framework. Because of how blockchains work, EventHandle events will likely never be fully removed from Aptos. - -# Module Events - -Module events are global event streams identified by a struct type. To define an event struct, add the attribute `#[event]` to a normal Move struct that has `drop` and `store` abilities. For example, - -``` -/// 0xcafe::my_module_name -/// An example module event struct denotes a coin transfer. -#[event] -struct TransferEvent has drop, store { - sender: address, - receiver: address, - amount: u64 -} -``` - -And then create and emit the event: - -``` -// Define an event. 
-let event = TransferEvent { - sender: 0xcafe, - receiver: 0xface, - amount: 100 -}; -// Emit the event just defined. -0x1::event::emit(event); -``` - -Example module events are available [here](https://explorer.aptoslabs.com/txn/682252266/events?network=testnet). Indices 0, 1, 2 are three module events of -type `0x66c34778730acbb120cefa57a3d98fd21e0c8b3a51e9baee530088b2e444e94c::event::MyEvent`. For API compatibility, module events contain the fields `Account Address`, `Creation Number` and `Sequence Number` with all set to 0. - -![Module event example](../../static/img/module-event.png "Module event example") - -## Access in Tests - -Events are stored in a separate merkle tree called event accumulator for each transaction. As it is ephemeral and hence independent from the state tree, MoveVM does not have read access to events when executing transaction in production. But in tests, Aptos Move supports two native functions that read emitted events for testing and debugging purposes: - -```rust -/// Return all emitted module events with type T as a vector. -# [test_only] -public native fun emitted_events(): vector; - -/// Return true iff `msg` was emitted. -# [test_only] -public fun was_event_emitted(msg: & T): bool -``` - -## API Access - -The API for querying module event is under construction. [GraphQL API](https://aptos.dev/guides/system-integrators-guide/#production-network-access) support remains to query both module events and EventHandle events. - -# Event-Handle Events (Deprecated) - -As part of our legacy, Aptos inherited the Libra/Diem event streams derived from EventHandles. Where each EventHandle is identified by a globally unique value, GUID, and a per-event sequence number and stored within a resource. Each event within a stream has a unique sequence number derived from the EventHandle sequence number. 
- -For example, during a [coin transfer](../tutorials/first-transaction.md), both the sender and receiver's accounts will emit `SentEvent` and `ReceivedEvent`, respectively. This data is stored within the ledger and can be queried via the REST interface's [Get events by event handle](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_events_by_event_handle). - -Assuming that an account `0xc40f1c9b9fdc204cf77f68c9bb7029b0abbe8ad9e5561f7794964076a4fbdcfd` had sent coins to another account, the following query could be made to the REST interface: `https://fullnode.devnet.aptoslabs.com/v1/accounts/c40f1c9b9fdc204cf77f68c9bb7029b0abbe8ad9e5561f7794964076a4fbdcfd/events/0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>/withdraw_events`. The output would be all `WithdrawEvent`s stored on that account, it would look like - -```json -[ - { - "key": "0x0000000000000000caa60eb4a01756955ab9b2d1caca52ed", - "sequence_number": "0", - "type": "0x1::coin::WithdrawEvent", - "data": { - "amount": "1000" - } - } -] -``` - -Each registered event has a unique `key`. The key `0x0000000000000000caa60eb4a01756955ab9b2d1caca52ed` maps to the event `0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>/sent_events` registered on account `0xc40f1c9b9fdc204cf77f68c9bb7029b0abbe8ad9e5561f7794964076a4fbdcfd`. This key can then be used to directly make event queries, e.g., `https://fullnode.devnet.aptoslabs.com/v1/events/0x0000000000000000caa60eb4a01756955ab9b2d1caca52ed`. - -These represent event streams, or a list of events with each entry containing a sequentially increasing `sequence_number` beginning at `0`, a `type`, and `data`. Each event must be defined by some `type`. There may be multiple events defined by the same or similar `type`s especially when using generics. Events have associated `data`. 
The general principle is to include all data necessary to understand the changes to the underlying resources before and after the execution of the transaction that changed the data and emitted the event. - -[coin_transfer]: https://github.com/aptos-labs/aptos-core/blob/bdd0a7fe82cd6aab4b47250e5eb6298986777cf7/aptos-move/framework/aptos-framework/sources/coin.move#L412 - -[get_events]: https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_events_by_event_handle - -## Migration to Module Events - -With the release of module events, EventHandle events are deprecated. To support migration to the module events, projects should emit a module event wherever they currently emit EventHandle events. Once external systems have sufficiently adopted module events, the legacy event may no longer need to be emitted. - -Note, the EventHandle events cannot and will not be deleted and hence projects that are unable to upgrade will continue to be able to leverage them. diff --git a/developer-docs-site/docs/concepts/fullnodes.md b/developer-docs-site/docs/concepts/fullnodes.md deleted file mode 100755 index be38fc274702f..0000000000000 --- a/developer-docs-site/docs/concepts/fullnodes.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: "Fullnodes Overview" -slug: "fullnodes" ---- -An Aptos node is an entity of the Aptos ecosystem that tracks the [state](../reference/glossary.md#state) of the Aptos blockchain. Clients interact with the blockchain via Aptos nodes. 
There are two types of nodes: -* [Validator nodes](./validator-nodes.md) -* Fullnodes - -Each Aptos node comprises several logical components: -* [REST service](../reference/glossary.md#rest-service) -* [Mempool](./validator-nodes.md#mempool) -* [Execution](./validator-nodes.md#execution) -* [Virtual Machine](./validator-nodes.md#virtual-machine) -* [Storage](./validator-nodes.md#storage) -* [State synchronizer](./validator-nodes.md#state-synchronizer) - -The [Aptos-core](../reference/glossary.md#aptos-core) software can be configured to run as a validator node or as a fullnode. - -## Overview - -Fullnodes can be run by anyone. Fullnodes verify blockchain history by either re-executing all transactions in the history of the Aptos blockchain or replaying each transaction's output. Fullnodes replicate the entire state of the blockchain by synchronizing with upstream participants, e.g., other fullnodes or validator nodes. To verify blockchain state, fullnodes receive the set of transactions and the [accumulator hash root](../reference/glossary.md#accumulator-root-hash) of the ledger signed by the validators. In addition, fullnodes accept transactions submitted by Aptos clients and forward them directly (or indirectly) to validator nodes. While fullnodes and validators share the same code, fullnodes do not participate in consensus. - -Depending on the fullnode upstream, a fullnode can be called as a validator fullnode, or a public fullnode: -* **Validator fullnode** state sync from a validator node directly. -* **Public fullnode** state sync from other fullnodes. - -There's no difference in their functionality, only whether their upstream node is a validator or another fullnode. Read more details about network topology [here](./node-networks-sync.md) - -Third-party blockchain explorers, wallets, exchanges, and DApps may run a local fullnode to: -* Leverage the REST interface for blockchain interactions. -* Get a consistent view of the Aptos ledger. 
-* Avoid rate limitations on read traffic. -* Run custom analytics on historical data. -* Get notifications about particular on-chain events. diff --git a/developer-docs-site/docs/concepts/gas-txn-fee.md b/developer-docs-site/docs/concepts/gas-txn-fee.md deleted file mode 100755 index e72d6cdc9a299..0000000000000 --- a/developer-docs-site/docs/concepts/gas-txn-fee.md +++ /dev/null @@ -1,190 +0,0 @@ ---- -title: "Gas and Storage Fees" -slug: "gas-txn-fee" ---- - -# Gas and Storage Fees - -Any transaction execution on the Aptos blockchain requires a processing fee. As of today, this fee comprises two components: -1. Execution & IO costs - - This covers your usage of transient computation resources, such as processing your transactions and propagating the validated record throughout the distributed network of the mainnet. - - It is measured in Gas Units whose price may fluctuate according to the load of the network. This allows execution & io costs to be low when the network is less busy. - - This portion of gas is burned permanently upon the execution of a transaction. -2. Storage fees - - This covers the cost to persistently store validated record in the distributed blockchain storage. - - It is measured in fixed APT prices, so the permanent storage cost stays stable even as the gas unit price fluctuates with the network's transient load. - - The storage fee can be refunded when the allocated storage space is deleted. The refund amount may be full or partial, based on the size and duration of the storage used. - - To keep system implementation simple, this portion of gas is burned and minted again upon refund. - -:::tip -Conceptually, this fee can be thought of as quite similar to how we pay for our home electric or water utilities. -::: - -## Unit of gas - -Transactions can range from simple and inexpensive to complicated based upon what they do. 
In the Aptos blockchain, a **unit of gas** represents a basic unit of consumption for transient resources, such as doing computation or accessing the storage. The latter should not be conflated with the long-term storage aspect of such operations, as that is covered by the storage fees separately. - -See [How Base Gas Works](./base-gas.md) for a detailed description of gas fee types and available optimizations. - -:::tip Unit of gas -👉 A **unit of gas** is a dimensionless number or a unit that is not associated with any one item such as a coin, expressed as an integer. The total gas units consumed by your transaction depend on the complexity of your transaction. The **gas price**, on the other hand, is expressed in terms of Aptos blockchain’s native coin (Octas). Also see [Transactions and States](txns-states.md) for how a transaction submitted to the Aptos blockchain looks like. -::: - -## The Fee Statement - -As of Aptos Framework release 1.7, the breakdown of fee charges and refunds is emitted as a module event represented by struct `0x1::transaction_fee::FeeStatement`. - -```Rust - #[event] - /// Breakdown of fee charge and refund for a transaction. - /// The structure is: - /// - /// - Net charge or refund (not in the statement) - /// - total charge: total_charge_gas_units, matches `gas_used` in the on-chain `TransactionInfo`. - /// This is the sum of the sub-items below. Notice that there's potential precision loss when - /// the conversion between internal and external gas units and between native token and gas - /// units, so it's possible that the numbers don't add up exactly. -- This number is the final - /// charge, while the break down is merely informational. 
- /// - gas charge for execution (CPU time): `execution_gas_units` - /// - gas charge for IO (storage random access): `io_gas_units` - /// - storage fee charge (storage space): `storage_fee_octas`, to be included in - /// `total_charge_gas_unit`, this number is converted to gas units according to the user - /// specified `gas_unit_price` on the transaction. - /// - storage deletion refund: `storage_fee_refund_octas`, this is not included in `gas_used` or - /// `total_charge_gas_units`, the net charge / refund is calculated by - /// `total_charge_gas_units` * `gas_unit_price` - `storage_fee_refund_octas`. - /// - /// This is meant to emitted as a module event. - struct FeeStatement has drop, store { - /// Total gas charge. - total_charge_gas_units: u64, - /// Execution gas charge. - execution_gas_units: u64, - /// IO gas charge. - io_gas_units: u64, - /// Storage fee charge. - storage_fee_octas: u64, - /// Storage fee refund. - storage_fee_refund_octas: u64, - } -``` - -## Gas price and prioritizing transactions - -In the Aptos network, the Aptos governance sets the absolute minimum gas unit price. However, the market determines how quickly a transaction with a particular gas unit price is processed. See [Ethereum Gas Tracker](https://etherscan.io/gastracker), for example, which shows the market price movements of Ethereum gas price. - -By specifying a higher gas unit price than the current market price, you can **increase** the priority level for your transaction on the blockchain by paying a larger processing fee. As part of consensus, when the leader selects transactions from its mempool to propose as part of the next block, it will prioritize selecting transactions with a higher gas unit price. Please note that higher gas fees only prioritize transaction selection for the next block. - -However, within a block, the order of transaction execution is determined by the system. 
This order is based on [transaction shuffling](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-27.md), which makes parallel execution more efficient by considering conflict patterns. While in most cases this is unnecessary, if the network is under load this measure can ensure your transaction is processed more quickly. See the `gas_unit_price` entry under [Estimating the gas units via simulation](#estimating-the-gas-units-via-simulation) for details. - -:::caution Increasing gas unit price with in-flight transactions -👉 If you are increasing gas unit price, but have in-flight (uncommitted) transactions for the same account, you should resubmit all of those transactions with the higher gas unit price. This is because transactions within the same account always have to respect sequence number, so effectively the higher gas unit price transaction will increase priority only after the in-flight transactions are included in a block. -::: - -## Specifying gas fees within a transaction - -When a transaction is submitted to the Aptos blockchain, the transaction must contain the following mandatory gas fields: - -- `max_gas_amount`: The maximum number of gas units that the transaction sender is willing to spend to execute the transaction. This determines the maximum computational resources that can be consumed by the transaction. -- `gas_price`: The gas price the transaction sender is willing to pay. It is expressed in Octa units, where 1 Octa equals 10-8 Aptos utility token. - - During the transaction execution, the total gas amount, expressed as: - ``` - (total gas units consumed) * (gas_price) - ``` - must not exceed `max_gas_amount`, or else the transaction will abort the execution. - -The transaction fee charged to the client will be at the most `gas_price * max_gas_amount`. - -## Gas parameters set by governance - -The following gas parameters are set by Aptos governance. 
- -:::tip On-chain gas schedule -These on-chain gas parameters are published on the Aptos blockchain at `0x1::gas_schedule::GasScheduleV2`. -::: - -- `txn.maximum_number_of_gas_units`: Maximum number of gas units that can be spent (this is the maximum allowed value for the `max_gas_amount` gas parameter in the transaction). This is to ensure that the dynamic pricing adjustments do not exceed how much you are willing to pay in total. -- `txn.min_transaction_gas_units`: Minimum number of gas units that can be spent. The `max_gas_amount` value in the transaction must be set to greater than this parameter’s value. - -There also exists some global per-category limits: -- `txn.max_execution_gas`: The maximum number of gas units a transaction can spend on execution. -- `txn.max_io_gas`: The maximum number of gas units a transaction can spend on IO. -- `txn.max_storage_fee`: The maximum amount of APT a transaction can spend on persistent storage. -These limits help decouple one category from another, allowing us to set `txn.maximum_number_of_gas_units` generously without having to worry about abuses. - -## Calculating Storage Fees - -The storage fee for a transaction is calculated based on the following factors: -1. The size of the transaction itself -2. The number of new storage slots used and bytes written -3. The events emitted -For details, see [How Base Gas Works](./base-gas.md). - -It should be noted that due to some backward compatibility reasons, the total storage fee of a transaction is currently presented to the client as part of the total `gas_used`. This means, this amount could vary based on the gas unit price even for the same transaction. - -Here is an example. Suppose we have a transaction that costs `100` gas units in execution & IO, and `5000` Octa in storage fees. The network will show that you have used -- `100 + 5000 / 100 = 150` gas units if the gas unit price is `100`, or -- `100 + 5000 / 200 = 125` gas units if the unit price is `200`. 
- -We are aware of the confusion this might create, and plan to present these as separate items in the future. However this will require some changes to the transaction output format and downstream clients, so please be patient while we work hard to make this happen. - -## Calculating Storage Deletion Refund - -If a transaction deletes state items, a refund is issued to the transaction payer for the released storage slots. Currently, a full refund is issued for the slot's fee, excluding any fees for excess bytes beyond a set quota (e.g., 1KB). However, fees for event emissions are not refundable. - -The refund amount is denominated in APT and is not converted to gas units or included in the total `gas_used`. Instead, this refund amount is specifically detailed in the `storage_fee_refund_octas` field of the [`FeeStatement`](#the-fee-statement). As a result, the transaction's net effect on the payer's APT balance is determined by `gas_used * gas_unit_price - storage_refund`. If the result is positive, there is a deduction from the account balance; if negative, there is a deposit. - -## Examples - -### Example 1: Account balance vs transaction fee - -**The sender’s account must have sufficient funds to pay for the transaction fee.** - -If, let's say, you transfer all the money out of your account so that you have no remaining balance to pay for the transaction fee. In such case the Aptos blockchain would let you know that the transaction will fail, and your transfer wouldn't succeed either. - -### Example 2: Transaction amounts vs transaction fee - -**Transaction fee is independent of transfer amounts in the transaction.** - -In a transaction, for example, transaction A, you are transferring 1000 coins from one account to another account. In a second transaction B, with the same gas field values of transaction A, you now transfer 100,000 coins from one account to another one account. 
Assuming that both the transactions A and B are sent roughly at the same time, then the gas costs for transactions A and B would be near-identical. - -## Estimating gas consumption via simulation - -The gas used for a transaction can be estimated by simulating the transaction on chain as described here or locally via the gas profiling feature of the Aptos CLI. The results of the simulated transaction represent the **exact** amount that is needed at the **exact** state of the blockchain at the time of the simulation. These gas units used may change based on the state of the chain. For this reason, any amount coming out of the simulation is only an estimate, and when setting the max gas amount, it should include an appropriate amount of headroom based upon your comfort-level and historical behaviors. Setting the max gas amount too low will result in the transaction aborting and the account being charged for whatever gas was consumed. - -To simulate transactions on chain, used the [`SimulateTransaction`](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/simulate_transaction) API. This API will run the exact transaction that you plan to run. - -To simulate the transaction locally, use the gas profiler, which is integrated into the Aptos CLI. -This will generate a web-based report to help you understand the precise gas usage of your transaction. -See [Gas Profiling](../move/move-on-aptos/gas-profiling) for more details. - -:::tip -Note that the `Signature` provided on the transaction must be all zeros. This is to prevent someone from using the valid signature. -::: - -To simulate the transaction, there are two flags: - -1. `estimate_gas_unit_price`: This flag will estimate the gas unit price in the transaction using the same algorithm as the [`estimate_gas_price`](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/estimate_gas_price) API. -2. 
`estimate_max_gas_amount`: This flag will find the maximum possible gas you can use, and it will simulate the transaction to tell you the actual `gas_used`. - -### Simulation steps - -The simulation steps for finding the correct amount of gas for a transaction are as follows: - -1. Estimate the gas via simulation with both `estimate_gas_unit_price` and `estimate_max_gas_amount` set to `true`. -2. Use the `gas_unit_price` in the returned transaction as your new transaction’s `gas_unit_price`. -3. View the `gas_used * gas_unit_price` values in the returned transaction as the **lower bound** for the cost of the transaction. -4. To calculate the upper bound of the cost, take the **minimum** of the `max_gas_amount` in the returned transaction, and the `gas_used * safety factor`. In the CLI a value of `1.5` is used for `safety factor`. Use this value as `max_gas_amount` for the transaction you want to submit. Note that the **upper bound** for the cost of the transaction is `max_gas_amount * gas_unit_price`, i.e., this is the most the sender of the transaction is charged. -5. At this point you now have your `gas_unit_price` and `max_gas_amount` to submit your transaction as follows: - 1. `gas_unit_price` from the returned simulated transaction. - 2. `max_gas_amount` as the minimum of the `gas_used` * `a safety factor` or the `max_gas_amount` from the transaction. -6. If you feel the need to prioritize or deprioritize your transaction, adjust the `gas_unit_price` of the transaction. Increase the value for higher priority, and decrease the value for lower priority. - -:::tip -Prioritization is based upon buckets of `gas_unit_price`. The buckets are defined in [`mempool_config.rs`](https://github.com/aptos-labs/aptos-core/blob/30b385bf38d3dc8c4e8ee0ff045bc5d0d2f67a85/config/src/config/mempool_config.rs#L8). The current buckets are `[0, 150, 300, 500, 1000, 3000, 5000, 10000, 100000, 1000000]`. Therefore, a `gas_unit_price` of 150 and 299 would be prioritized nearly the same. 
-::: - -:::tip -Note that the `safety factor` only takes into consideration changes related to execution and IO. Unexpected creation of storage slots may not be sufficiently covered. -::: diff --git a/developer-docs-site/docs/concepts/governance.md b/developer-docs-site/docs/concepts/governance.md deleted file mode 100644 index 619d81ad85ce8..0000000000000 --- a/developer-docs-site/docs/concepts/governance.md +++ /dev/null @@ -1,69 +0,0 @@ ---- -title: "Governance" -slug: "governance" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Governance - -The Aptos on-chain governance is a process by which the Aptos community members can create and vote on proposals that minimize the cost of blockchain upgrades. The following describes the scope of these proposals for the Aptos on-chain governance: - -- Changes to the blockchain parameters, for example, the epoch duration, and the minimum required and maximum allowed validator stake. -- Changes to the core blockchain code. -- Upgrades to the Aptos Framework modules for fixing bugs or for adding or enhancing the Aptos blockchain functionality. -- Deploying new framework modules (at the address `0x1` - `0xa`). - -## How a proposal becomes ready to be resolved - -See below for a summary description of how a proposal comes to exist and when it becomes ready to be resolved: - - - -- The Aptos community can suggest an Aptos Improvement Proposal (AIP) in the [Aptos Foundation AIP GitHub](https://github.com/aptos-foundation/AIPs). -- When appropriate, an on-chain proposal can be created for the AIP via the `aptos_governance` module. -- Voters can then vote on this proposal on-chain via the `aptos_governance` module. If there is sufficient support for a proposal, then it can be resolved. -- Governance requires a minimal number of votes to be cast by an expiration threshold. 
However, if sufficient votes, more than 50% of the total supply, are accumulated prior to that threshold, the proposal can be executed **without waiting for the full voting period**. - -## Who can propose - -- To either propose or vote, you must stake, but you are not required to run a validator node. However, we recommend that you run validator with a stake as part of the validator set to gain rewards from your stake. -- To create a proposal, the proposer's backing stake pool must have the minimum required proposer stake. The proposer's stake must be locked up for at least as long as the proposal's voting period. This is to avoid potential spammy proposals. -- Proposers can create a proposal by calling [`aptos_governance::create_proposal`](https://github.com/aptos-labs/aptos-core/blob/27a255ebc662817944435349afc4ec33ea317e64/aptos-move/framework/aptos-framework/sources/aptos_governance.move#L183). - -## Who can vote - -- To vote, you must stake, though you are not required to run a validator node. Your voting power is derived from the backing stake pool. -- Voting power is calculated based on the current epoch's active stake of the proposer or voter's backing stake pool. In addition, the stake pool's lockup must be at least as long as the proposal's duration. -- Verify proposals before voting. Ensure each proposal is linked to its source code, and if there is a corresponding AIP, the AIP is in the title and description. - -:::tip -Each stake pool can be used to vote on each proposal exactly only one time. -::: - -## Who can resolve -- Anyone can resolve an on-chain proposal that has passed voting requirements by using the `aptos governance execute-proposal` command from Aptos CLI. - -## Aptos Improvement Proposals (AIPs) - -AIPs are proposals created by the Aptos community or the Aptos Labs team to improve the operations and development of the Aptos chain. 
-To submit an AIP, create an issue in [`Aptos Foundation's GitHub repository`](https://github.com/aptos-foundation/AIPs/issues) using the [template](https://github.com/aptos-foundation/AIPs/blob/main/TEMPLATE.md) -To keep up with new AIPs, check the `#aip-announcements` channel on [Aptos' discord channel](https://discord.gg/aptosnetwork). -To view and vote on on-chain proposals, go to [`Aptos' Governance website`](https://governance.aptosfoundation.org/). - -## Technical Implementation of Aptos Governance -The majority of the governance logic is in [`aptos_governance.move and voting.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources). -The `aptos_governance` module outlines how users can interact with Aptos Governance. It's the external-facing module of the Aptos on-chain governance process and contains logic and checks that are specific to Aptos Governance. -The `voting` module is the Aptos governance standard that can be used by DAOs on the Aptos chain to create their own on-chain governance process. - -If you are thinking about creating a DAO on Aptos, you can refer to `aptos_governance`'s usage of the `voting` module as an example. -In `aptos_governance`, we rely on the `voting` module to create, vote on, and resolve a proposal. -- `aptos_governance::create_proposal` calls `voting::create_proposal` to create a proposal on-chain, when an off-chain AIP acquires sufficient importance. -- `aptos_governance::vote` calls `voting::vote` to record the vote on a proposal on-chain; -- `aptos_governance::resolve` can be called by anyone. It calls `voting::resolve` to resolve the proposal on-chain. 
diff --git a/developer-docs-site/docs/concepts/index.md b/developer-docs-site/docs/concepts/index.md deleted file mode 100644 index cfa3c6857979c..0000000000000 --- a/developer-docs-site/docs/concepts/index.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -title: "Learn about Aptos" ---- - -# Learn Aptos Concepts - -Start here to get into the core concepts of the Aptos blockchain. Then review our [research papers](https://aptoslabs.com/research) and the Aptos source code found in the [Aptos-core](https://github.com/aptos-labs/aptos-core) repository of GitHub while continuing your journey through this site. The source contains READMEs and code comments invaluable to developing on Aptos. - -- ### [Aptos White Paper](../aptos-white-paper/index.md) -- ### [Aptos Blockchain Deep Dive](./blockchain.md) -- ### [Move - A Web3 Language and Runtime](./move.md) -- ### [Accounts](./accounts.md) -- ### [Resources](./resources.md) -- ### [Events](./events.md) -- ### [Transactions and States](./txns-states.md) -- ### [Gas and Transaction Fees](./gas-txn-fee.md) -- ### [Computing Transaction Gas](./base-gas.md) -- ### [Blocks](./blocks.md) -- ### [Staking](./staking.md) -- ### [Governance](./governance.md) diff --git a/developer-docs-site/docs/concepts/move.md b/developer-docs-site/docs/concepts/move.md deleted file mode 100644 index ded1944f05c7f..0000000000000 --- a/developer-docs-site/docs/concepts/move.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "Move - A Web3 Language and Runtime" -slug: "move-on-aptos" ---- - -# Move - A Web3 Language and Runtime - -The Aptos blockchain consists of validator nodes that run a consensus protocol. The consensus protocol agrees upon the ordering of transactions and their output when executed on the Move Virtual Machine (MoveVM). Each validator node translates transactions along with the current blockchain ledger state as input into the VM. The MoveVM processes this input to produce a changeset or storage delta as output. 
Once consensus agrees and commits to the output, it becomes publicly visible. In this guide, we will introduce you to core Move concepts and how they apply to developing on Aptos. - -## What is Move? - -Move is a safe and secure programming language for Web3 that emphasizes **scarcity** and **access control**. Any assets in Move can be represented by or stored within a *resource*. **Scarcity** is enforced by default as structs cannot be accidentally duplicated or dropped. Only structs that have explicitly been defined at the bytecode layer as *copy* can be duplicated and *drop* can be dropped, respectively. - -**Access control** comes from both the notion of accounts as well as module access privileges. A module in Move may either be a library or a program that can create, store, or transfer assets. Move ensures that only public module functions may be accessed by other modules. Unless a struct has a public constructor, it can only be constructed within the module that defines it. Similarly, fields within a struct can only be accessed and mutated within its module or via public accessors and setters. Furthermore, structs defined with *key* can be stored and read from global storage only within the module that defines it. Structs with *store* can be stored within another *store* or *key* struct inside or outside the module that defines that struct. - -In Move, a transaction's sender is represented by a *signer*, a verified owner of a specific account. The signer has the highest level of permission in Move and is the only entity capable of adding resources into an account. In addition, a module developer can require that a signer be present to access resources or modify assets stored within an account. 
- -## Comparison to other VMs - -| | Aptos / Move | Solana / SeaLevel | EVM | Sui / Move | -|---|---|---|---|---| -| Data storage | Stored at a global address or within the owner's account | Stored within the owner's account associated with a program | Stored within the account associated with a smart contract | Stored at a global address | -| Parallelization | Capable of inferring parallelization at runtime within Aptos | Requires specifying all data accessed | Currently serial; nothing parallel in production | Requires specifying all data accessed | -| Transaction safety | Sequence number | Transaction uniqueness | nonces, similar to sequence numbers | Transaction uniqueness | -| Type safety | Module structs and generics | Program structs | Contract types | Module structs and generics | -| Function calling | Static dispatch | Static dispatch | Dynamic dispatch | Static dispatch | -| Authenticated Storage | [Yes](../reference/glossary.md#merkle-trees) | No | Yes | No | -| Object global accessibility | Yes | Not applicable | Not applicable | No, can be placed in other objects | - -## Aptos Move features - -Each deployment of the MoveVM has the ability to extend the core MoveVM with additional features via an adapter layer. Furthermore, MoveVM has a framework to support standard operations much like a computer has an operating system. - -The Aptos Move adapter features include: -* [Move Objects](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-10.md) that offer an extensible programming model for global access to a heterogeneous set of resources stored at a single address on-chain. -* [Cryptography primitives](../move/move-on-aptos/cryptography) for building scalable, privacy-preserving dapps. -* [Resource accounts](../move/move-on-aptos/resource-accounts) that offer programmable accounts on-chain, which can be useful for DAOs (decentralized autonomous organizations), shared accounts, or building complex applications on-chain. 
-* [Tables](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/table.move) for storing key, value data within an account at scale. -* Parallelism via [Block-STM](https://medium.com/aptoslabs/block-stm-how-we-execute-over-160k-transactions-per-second-on-the-aptos-blockchain-3b003657e4ba) that enables concurrent execution of transactions without any input from the user. -* Multi-agent framework that enables a single transaction to be submitted with multiple distinct `signer` entities. - -The Aptos framework ships with many useful libraries: -* An [Aptos Token Objects](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/aptos-token-objects/sources) standard as defined in [AIP-11](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-11.md) and [AIP-22](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-22.md) that makes it possible to create interoperable NFTs with either lightweight smart contract development or none at all. -* A [Coin standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move) that makes it possible to create type-safe Coins by publishing a trivial module. -* A [Fungible asset standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/fungible_asset.move) as defined in [AIP-21](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-21.md) to modernize the coin concept with better programmability and controls. -* A [staking](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/staking_contract.move) and [delegation](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/delegation_pool.move) framework. 
-* A [`type_of`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/type_info.move) service to identify at run-time the address, module, and struct name of a given type. -* A [timestamp service](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/timestamp.move) that provides a monotonically increasing clock that maps to the actual current unixtime. - -With updates frequently. - -## More Resources - -Developers can begin their journey in Move by heading over to our [Move developer page](../move/move-on-aptos.md). diff --git a/developer-docs-site/docs/concepts/node-networks-sync.md b/developer-docs-site/docs/concepts/node-networks-sync.md deleted file mode 100755 index 1cc7e706c592b..0000000000000 --- a/developer-docs-site/docs/concepts/node-networks-sync.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: "Node Networks and Sync" -slug: "node-networks-sync" ---- - -# Node Networks and Synchronization - -Validator nodes and fullnodes form a hierarchical structure with validator nodes at the root and fullnodes everywhere else. The Aptos blockchain distinguishes two types of fullnodes: validator fullnodes and public fullnodes. Validator fullnodes connect directly to validator nodes and offer scalability alongside DDoS mitigation. Public fullnodes connect to validator fullnodes (or other public fullnodes) to gain low-latency access to the Aptos network. - -![v-fn-network.svg](../../static/img/docs/v-fn-network.svg) - -## Node types - -Aptos operates with these node types: - -* [Validator nodes (VNs)](../nodes/validator-node/index.md) - participates in consensus and drives [transaction processing](../concepts/txns-states.md). -* Validator fullnodes (VFNs) - captures and keeps up-to-date on the state of the blockchain; run by the validator operator, so it can connect directly to the validator node and therefore serve requests from public fullnodes. Otherwise, it works like a public fullnode. 
-* [Public fullnodes (PFNs)](../nodes/full-node/index.md) - run by someone who is not a validator operator, PFNs cannot connect directly to a validator node and therefore rely upon VFNs for synchronization. -* [Archival nodes (ANs)](../guides/state-sync.md#running-archival-nodes) - is a fullnode that contains all blockchain data since the start of the blockchain's history. - -## Separate network stacks -The Aptos blockchain supports distinct networking stacks for various network topologies. For example, the validator network is independent of the fullnode network. The advantages of having separate network stacks include: -* Clean separation between the different networks. -* Better support for security preferences (e.g., bidirectional vs server authentication). -* Allowance for isolated discovery protocols (i.e., on-chain discovery for validator node's public endpoints vs. manual configuration for private organizations). - -# Node synchronization -Aptos nodes synchronize to the latest state of the Aptos blockchain through two mechanisms: consensus or state synchronization. Validator nodes will use both consensus and state synchronization to stay up-to-date, while fullnodes use only state synchronization. - -For example, a validator node will invoke state synchronization when it comes online for the first time or reboots (e.g., after being offline for a while). Once the validator is up-to-date with the latest state of the blockchain it will begin participating in consensus and rely exclusively on consensus to stay up-to-date. Fullnodes, however, continuously rely on state synchronization to get and stay up-to-date as the blockchain grows. - -## State synchronizer - -Each Aptos node contains a [State Synchronizer](../guides/state-sync.md) component which is used to synchronize the state of the node with its peers. 
This component has the same functionality for all types of Aptos nodes: it utilizes the dedicated peer-to-peer network to continuously request and disseminate blockchain data. Validator nodes distribute blockchain data within the validator node network, while fullnodes rely on other fullnodes (i.e., validator nodes or public fullnodes). - diff --git a/developer-docs-site/docs/concepts/resources.md b/developer-docs-site/docs/concepts/resources.md deleted file mode 100644 index 25e25de47574d..0000000000000 --- a/developer-docs-site/docs/concepts/resources.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -title: "Resources" -id: "resources" ---- - -# Resources - -On Aptos, on-chain state is organized into resources and modules. These are then stored within the individual accounts. This is different from other blockchains, such as Ethereum, where each smart contract maintains its own storage space. See [Accounts](./accounts.md) for more details on accounts. - -## Resources vs Instances - -Move modules define struct definitions. Struct definitions may include abilities such as `key` or `store`. Resources are struct instances with the `key` ability that are stored in global storage or directly in an account. The `store` ability allows struct instances to be stored within resources. An example here is how the APT coin is stored: CoinStore is the resource that contains the APT coin, while the Coin itself is an instance: - -```rust -/// A holder of a specific coin type and associated event handles. -/// These are kept in a single resource to ensure locality of data. -struct CoinStore<phantom CoinType> has key { - coin: Coin<CoinType>, -} - -/// Main structure representing a coin/token in an account's custody. -struct Coin<phantom CoinType> has store { - /// Amount of coin this address has. - value: u64, -} -``` - -The Coin instance can be taken out of CoinStore with the owning account's permission and easily transferred to another CoinStore resource. 
It can also be kept in any other custom resource, if the definition allows, for example: - -```rust -struct CustomCoinBox has key { - coin: Coin, -} -``` - -## Define resources and objects - -All instances and resources are defined within a module that is stored at an address. For example `0x1234::coin::Coin<0x1234::coin::SomeCoin>` would be represented as: - -``` -module 0x1234::coin { - struct CoinStore has key { - coin: Coin, - } - - struct SomeCoin { } -} -``` - -In this example, `0x1234` is the address, `coin` is the module, `Coin` is a struct that can be stored as a resource, and `SomeCoin` is a struct that is unlikely to ever be represented as an instance. The use of the phantom type allows for there to exist many distinct types of `CoinStore` resources with different `CoinType` parameters. - -## Permissions of Instances including Resources - -Permissions of resources and other instances are dictated by the module where the struct is defined. For example, an instance within a resource may be accessed and even removed from the resource, but the internal state cannot be changed without permission from the module where the instance's struct is defined. - -Ownership, on the other hand, is signified by either storing a resource under an account or by logic within the module that defines the struct. - -## Viewing a resource - -Resources are stored within accounts. Resources can be located by searching within the owner's account for the resource at its full query path inclusive of the account where it is stored as well as its address and module. Resources can be viewed on the [Aptos Explorer](https://explorer.aptoslabs.com/) by searching for the owning account or be directly fetched from a fullnode's API. - -## How resources are stored - -The module that defines a struct specifies how instances may be stored. 
For example, events for depositing a token can be stored in the receiver account where the deposit happens or in the account where the token module is deployed. In general, storing data in individual user accounts enables a higher level of execution efficiency as there would be no state read/write conflicts among transactions from different accounts, allowing for seamless parallel execution. diff --git a/developer-docs-site/docs/concepts/staking.md b/developer-docs-site/docs/concepts/staking.md deleted file mode 100644 index 60ca55d6e485c..0000000000000 --- a/developer-docs-site/docs/concepts/staking.md +++ /dev/null @@ -1,294 +0,0 @@ ---- -title: "Staking" -slug: "staking" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Staking - -:::tip Consensus -We strongly recommend that you read the consensus section of [Aptos Blockchain Deep Dive](./blockchain.md#consensus) before proceeding further. -::: - -In a distributed system like blockchain, executing a transaction is distinct from updating the state of the ledger and persisting the results in storage. An agreement, i.e., consensus, must be reached by a quorum of validators on the ordering of transactions and their execution results before these results are persisted in storage and the state of the ledger is updated. - -Anyone can participate in the Aptos consensus process, if they stake sufficient utility coin, i.e., place their utility coin into escrow. To encourage validators to participate in the consensus process, each validator's vote weight is proportional to the amount of validator's stake. In exchange, the validator is rewarded proportionally to the amount staked. Hence, the performance of the blockchain is aligned with the validator's interest, i.e., rewards. - -:::note -Currently, slashing is not implemented. 
-::: - -The current on-chain data can be found in [`staking_config::StakingConfig`](https://mainnet.aptoslabs.com/v1/accounts/0x1/resource/0x1::staking_config::StakingConfig). The configuration set is defined in [`staking_config.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/configs/staking_config.move). - -The rest of this document presents how staking works on the Aptos blockchain. See [Supporting documentation](#supporting-documentation) at the bottom for related resources. - -## Staking on the Aptos blockchain - - - -The Aptos staking module defines a capability that represents ownership. - -:::tip Ownership -See the `OwnerCapability` defined in [stake.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/stake.move). -::: - -The `OwnerCapability` resource can be used to control the stake pool. Three personas are supported: -- Owner -- Operator -- Voter - -Using this owner-operator-voter model, a custodian can assume the owner persona and stake on the Aptos blockchain and participate in the Aptos governance. This model allows delegations and staking services to be built as it separates the account that is control of the funds from the other accounts (operator, voter), hence allows secure delegations of responsibilities. - -This section describes how this works, using Bob and Alice in the example. - -### Owner - -The owner is the owner of the funds. For example, Bob creates an account on the Aptos blockchain. Now Bob has the `OwnerCapability` resource. Bob can assign his account’s operator address to the account of Alice, a trusted node operator, to appoint Alice as a validator. - -As an owner: - -- Bob owns the funds that will be used for staking. -- Only Bob can add, unlock or withdraw funds. -- Only Bob can extend the lockup period. -- Bob can change the node operator Alice to some other node operator anytime Bob wishes to do so. 
-- Bob can set the operator commission percentage. -- The reward will be deposited into Bob's (owner's) account. - -### Operator - -A node operator is assigned by the fund owner to run the validator node and receives commission as set by the owner. The two personas, the owner and the operator, can be two separate entities or the same. For example, Alice (operator) runs the validator node, operating at the behest of Bob, the fund owner. - -As an operator: - -- Alice has permissions only to join or leave the validator set. -- As a validator, Alice will perform the validating function. -- Alice has the permissions to change the consensus key and network addresses. The consensus key is used by Alice to participate in the validator consensus process, i.e., to vote and propose a block. Alice is allowed to change ("rotate") this key in case this key is compromised. -- However, Alice cannot move funds (unless Alice is the owner, i.e., Alice has the `OwnerCapability` resource). -- The operator commission is deducted from the staker (owner) rewards and deposited into the operator account. - -### Voter - -An owner can designate a voter. This enables the voter to participate in governance. The voter will use the voter key to sign the governance votes in the transactions. - -:::tip Governance -This document describes staking. See [Governance](./governance.md) for how to participate in the Aptos on-chain governance using the owner-voter model. -::: - -## Validation on the Aptos blockchain - -Throughout the duration of an epoch, the following flow of events occurs several times (thousands of times): - -- A validator leader is selected by a deterministic formula based on the validator reputation determined by validator's performance (including whether the validator has voted in the past or not) and stake. 
**This leader selection is not done by voting.** -- The selected leader sends a proposal containing the collected quorum votes of the previous proposal and the leader's proposed order of transactions for the new block. -- All the validators from the validator set will vote on the leader's proposal for the new block. Once consensus is reached, the block can be finalized. Hence, the actual list of votes to achieve consensus is a subset of all the validators in the validator set. This leader validator is rewarded. **Rewards are given only to the leader validator, not to the voter validators.** -- The above flow repeats with the selection of another validator leader and repeating the steps for the next new block. Rewards are given at the end of the epoch. - -## Validator state and stake state - -States are defined for a validator and the stake. - -- **Validator state:** A validator can be in any one of these four states. Moreover, the validator can go from inactive (not tracked in the validator set anywhere) state to any one of the other three states: - - inactive - - pending_active. - - active. - - pending_inactive. -- **Stake state:** A validator in pending_inactive or active state, can have their stake in either of these four states: - - inactive. - - pending_active. - - active. - - pending_inactive. - - These stake states are applicable for the existing validators in the validator set adding or removing their stake. - -### Validator states - - - -There are two edge cases to call out: -1. If a validator's stake drops below the required [minimum](#minimum-and-maximum-stake), that validator will be moved from active state directly to the inactive state during an epoch change. This happens only during an epoch change. -2. Aptos governance can also directly remove validators from the active set. 
**Note that governance proposals will always trigger an epoch change.** - -### Stake state - -The state of stake has more granularity than that of the validator; additional stake can be added and a portion of stake removed from an active validator. - - - -### Validator ruleset - -The below ruleset is applicable during the changes of state: - -- Voting power can change (increase or decrease) only on epoch boundary. -- A validator’s consensus key and the validator and validator fullnode network addresses can change only on epoch boundary. -- Pending inactive stake cannot be moved into inactive (and thus withdrawable) until before lockup expires. -- No validators in the active validator set can have their stake below the minimum required stake. - -## Validator flow - -:::tip Staking pool operations -See [Staking pool operations](../nodes/validator-node/operator/staking-pool-operations.md) for the correct sequence of commands to run for the below flow. -::: - -1. Owner initializes the stake pool with `aptos stake create-staking-contract`. -2. When the owner is ready to deposit the stake (or have funds assigned by a staking service in exchange for ownership capability), owner calls `aptos stake add-stake`. -3. When the validator node is ready, the operator can call `aptos node join-validator-set` to join the active validator set. Changes will be effective in the next epoch. -4. Validator validates (proposes blocks as a leader-validator) and gains rewards. The stake will automatically be locked up for a fixed duration (set by governance) and automatically renewed at expiration. -5. At any point, if the operator wants to update the consensus key or validator network addresses, they can call `aptos node update-consensus-key` or `aptos node update-validator-network-addresses`. Similar to changes to stake, the changes to consensus key or validator network addresses are only effective in the next epoch. -6. Validator can request to unlock their stake at any time. 
However, their stake will only become withdrawable when their current lockup expires. This can be at most as long as the fixed lockup duration. -7. After exiting, the validator can either explicitly leave the validator set by calling `aptos node leave-validator-set` or if their stake drops below the min required, they would get removed at the end of the epoch. -8. Validator can always rejoin the validator set by going through steps 2-3 again. -9. An owner can always switch operators by calling `aptos stake set-operator`. -10. An owner can always switch designated voter by calling `aptos stake set-delegated-voter`. - -## Joining the validator set - -Participating as a validator node on the Aptos network works like this: - -1. Operator runs a validator node and configures the on-chain validator network addresses and rotates the consensus key. -2. Owner deposits her Aptos coins funds as stake, or have funds assigned by a staking service. The stake must be at least the minimum amount required. -3. **The validator node cannot sync until the stake pool becomes active.** -4. Operator validates and gains rewards. -5. The staked pool is automatically be locked up for a fixed duration (set by the Aptos governance) and will be automatically renewed at expiration. You cannot withdraw any of your staked amount until your lockup period expires. See [stake.move#L728](https://github.com/aptos-labs/aptos-core/blob/00a234cc233b01f1a7e1680f81b72214a7af91a9/aptos-move/framework/aptos-framework/sources/stake.move#L728). -6. Operator must wait until the new epoch starts before their validator becomes active. - -:::tip Joining the validator set -For step-by-step instructions on how to join the validator set, see: [Joining Validator Set](../nodes/validator-node/operator/staking-pool-operations.md#joining-validator-set). -::: - -### Minimum and maximum stake - -You must stake the required minimum amount to join the validator set. Moreover, you can only stake up to the maximum stake amount. 
The current required minimum for staking is 1M APT tokens and the maximum is 50M APT tokens. - -If at any time after joining the validator set, your current staked amount exceeds the maximum allowed stake (for example as the rewards are added to your staked amount), then your voting power and the rewards will be calculated only using the maximum allowed stake amount, and not your current staked amount. - -The owner can withdraw part of the stake and leave their balance below the required minimum. In such case, their stake pool will be removed from the validator set when the next epoch starts. - -### Automatic lockup duration - -When you join the validator set, your stake will automatically be locked up for a fixed duration that is set by the Aptos governance. - -### Automatic lockup renewal - -When your lockup period expires, it will be automatically renewed, so that you can continue to validate and receive the rewards. - -### Unlocking your stake - -You can request to unlock your stake at any time. However, your stake will only become withdrawable when your current lockup expires. This can be at most as long as the fixed lockup duration. You will continue earning rewards on your stake until it becomes withdrawable. - -The principal amount is updated when any of the following actions occur: -1. Operator [requests commission unlock](../nodes/validator-node/operator/staking-pool-operations.md#requesting-commission) -2. Staker (owner) withdraws funds -3. Staker (owner) switches operators - -When the staker unlocks stake, this also triggers a commission unlock. The full commission amount for any staking rewards earned is unlocked. This is not proportional to the unlock stake amount. Commission is distributed to the operator after the lockup ends when `request commission` is called a second time or when staker withdraws (distributes) the unlocked stake. - -### Resetting the lockup - -When the lockup period expires, it is automatically renewed by the network. 
However, the owner can explicitly reset the lockup. - -:::tip Set by the governance - -The lockup duration is decided by the Aptos governance, i.e., by the covenants that the Aptos community members vote on, and not by any special entity like the Aptos Labs. -::: - -## Epoch - -An epoch in the Aptos blockchain is defined as a duration of time, in seconds, during which a number of blocks are voted on by the validators, the validator set is updated, and the rewards are distributed to the validators. - -:::tip Epoch on Mainnet -The Aptos mainnet epoch is set as 7200 seconds (two hours). -::: - -### Triggers at the epoch start - -:::tip -See the [Triggers at epoch boundary section of `stake.move`](https://github.com/aptos-labs/aptos-core/blob/256618470f2ad7d89757263fbdbae38ac7085317/aptos-move/framework/aptos-framework/sources/stake.move#L1036) for the full code. -::: - -At the start of each epoch, the following key events are triggered: - -- Update the validator set by adding the pending active validators to the active validators set and by removing the pending inactive validators from the active validators set. -- Move any pending active stake to active stake, and any pending inactive stake to inactive stake. -- The staking pool's voting power in this new epoch is updated to the total active stake. -- Automatically renew a validator's lockup for the validators who will still be in the validator set in the next epoch. -- The voting power of each validator in the validator set is updated to be the corresponding staking pool's voting power. -- Rewards are distributed to the validators that participated in the previous epoch. - -## Rewards - -Rewards for staking are calculated by using: - -1. The `rewards_rate`, an annual percentage yield (APY), i.e., rewards accrue as a compound interest on your current staked amount. -2. Your staked amount. -3. Your proposer performance in the Aptos governance. - -:::tip Rewards rate -The `rewards_rate` is set by the Aptos governance. 
Also see [Validation on the Aptos blockchain](#validation-on-the-aptos-blockchain). -::: - -### Rewards formula - -See below the formula used to calculate rewards to the validator: - -``` -Reward = staked_amount * rewards_rate per epoch * (Number of successful proposals by the validator / Total number of proposals made by the validator) -``` - -### Rewards paid every epoch - -Rewards are paid every epoch. Any reward you (i.e., validator) earned at the end of current epoch is added to your staked amount. The reward at the end of the next epoch is calculated based on your increased staked amount (i.e., original staked amount plus the added reward), and so on. - -### Rewards based on the proposer performance - -The validator rewards calculation uses the validator's proposer performance. Once you are in the validator set, you can propose in every epoch. The more successfully you propose, i.e., your proposals pass, the more rewards you will receive. - -Note that rewards are given only to the **leader-validators**, i.e., validators who propose the new block, and not to the **voter-validators** who vote on the leader's proposal for the new block. See [Validation on the Aptos blockchain](#validation-on-the-aptos-blockchain). - -:::tip Rewards are subject to lockup period -All the validator rewards are also subject to lockup period as they are added to the original staked amount. -::: - -## Leaving the validator set - -:::tip -See the Aptos Stake module in the Move language at [stake.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/stake.move). -::: - -- At any time you can call the following sequence of functions to leave the validator set: - - Call `Stake::unlock` to unlock your stake amount, and - - Either call `Stake::withdraw` to withdraw your staked amount at the next epoch, or call `Stake::leave_validator_set`. 
- -## Rejoining the validator set - -When you leave a validator set, you can rejoin by depositing the minimum required stake amount. - -## Supporting documentation - -* [Current on-chain data](https://mainnet.aptoslabs.com/v1/accounts/0x1/resource/0x1::staking_config::StakingConfig) -* [Staking Pool Operations](../nodes/validator-node/operator/staking-pool-operations.md) -* [Delegation Pool Operations](../nodes/validator-node/operator/delegation-pool-operations.md) -* [Configuration file `staking_config.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/configs/staking_config.move) -* [Contract file `staking_contract.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/staking_contract.move) covering requesting commissions -* [All staking-related `.move files](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/aptos-framework/sources) diff --git a/developer-docs-site/docs/concepts/txns-states.md b/developer-docs-site/docs/concepts/txns-states.md deleted file mode 100755 index 71e5a0ff44280..0000000000000 --- a/developer-docs-site/docs/concepts/txns-states.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: "Transactions and States" -slug: "txns-states" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Transactions and States - -The Aptos blockchain stores three types of data: - -* **Transactions**: Transactions represent an intended operation being performed by an account on the blockchain (e.g., transferring assets). -* **States**: The (blockchain ledger) state represents the accumulation of the output of execution of transactions, the values stored within all [resources](./resources). -* [**Events**](./events.md): Ancillary data published by the execution of a transaction. - -:::tip -Only transactions can change the ledger state. 
-::: - -## Transactions - -Aptos transactions contain information such as the sender’s account address, authentication from the sender, the desired operation to be performed on the Aptos blockchain, and the amount of gas the sender is willing to pay to execute the transaction. - -### Transaction states - -A transaction may end in one of the following states: - -* Committed on the blockchain and executed. This is considered as a successful transaction. -* Committed on the blockchain and aborted. The abort code indicates why the transaction failed to execute. -* Discarded during transaction submission due to a validation check such as insufficient gas, invalid transaction format, or incorrect key. -* Discarded after transaction submission but before attempted execution. This could be caused by timeouts or insufficient gas due to other transactions affecting the account. - -The sender’s account will be charged gas for any committed transactions. - -During transaction submission, the submitter is notified of successful submission or a reason for failing validations otherwise. - -A transaction that is successfully submitted but ultimately discarded may have no visible state in any accessible Aptos node or within the Aptos network. A user can attempt to resubmit the same transaction to re-validate the transaction. If the submitting node believes that this transaction is still valid, it will return an error stating that an identical transaction has been submitted. - -The submitter can try to increase the gas cost by a trivial amount to help make progress and adjust for whatever may have been causing the discarding of the transaction further downstream. - -:::tip Read more -See [Aptos Blockchain Deep Dive](./blockchain.md) for a comprehensive description of the Aptos transaction lifecycle. 
-
-:::
-
-### Contents of a Transaction
-
-A signed transaction on the blockchain contains the following information:
-
-- **Signature**: The sender uses a digital signature to verify that they signed the transaction (i.e., authentication).
-- **Sender address**: The sender's [account address](./accounts.md#account-address).
-- **Sender public key**: The public authentication key that corresponds to the private authentication key used to sign the transaction.
-- **Payload**: Indicates an action or set of actions to perform on Alice's behalf. In the case this is a Move function, it directly calls into Move bytecode on the chain. Alternatively, it may be a Move bytecode peer-to-peer [transaction script](../reference/glossary.md#transaction-script). It also contains a list of inputs to the function or script. For this example, it is a function call to transfer an amount of Aptos Coins from Alice's account to Bob's account, where Alice's account is implied by sending the transaction and Bob's account and the amount are specified as transaction inputs.
-- [**Gas unit price**](../reference/glossary.md#gas-unit-price): The amount the sender is willing to pay per unit of gas, to execute the transaction. This is represented as Octa or units of 10^-8 utility tokens.
-- [**Maximum gas amount**](../reference/glossary.md#maximum-gas-amount): The maximum gas amount in Aptos utility tokens the sender is willing to pay for this transaction. Gas charges are equal to the base gas cost covered by computation and IO multiplied by the gas price. Gas costs also include storage with an Apt-fixed priced storage model. This is represented as Octa or units of 10^-8 Aptos utility tokens.
-- **Gas price** (in specified gas units): This is the amount the sender is willing to pay per unit of [gas](./gas-txn-fee.md) to execute the transaction. [Gas](./gas-txn-fee.md) is a way to pay for computation and storage. A gas unit is an abstract measurement of computation with no inherent real-world value.
-- **Maximum gas amount**: The [maximum gas amount](./gas-txn-fee.md#gas-and-transaction-fee-on-the-aptos-blockchain) is the maximum gas units the transaction is allowed to consume. -- **Sequence number**: This is an unsigned integer that must be equal to the sender's account [sequence number](./accounts.md#account-sequence-number) at the time of execution. -- **Expiration time**: A timestamp after which the transaction ceases to be valid (i.e., expires). - -### Types of transaction payloads -Within a given transaction, the two most common types of payloads include: - -- An entry point -- [A script (payload)](../move/move-on-aptos/move-scripts) - -Currently the SDKs [Python](https://aptos.dev/sdks/python-sdk) and [Typescript](https://aptos.dev/sdks/ts-sdk/index) support both. This guide points out many of those entry points, such as `coin::transfer` and `aptos_account::create_account`. - -All operations on the Aptos blockchain should be available via entry point calls. While one could submit multiple transactions calling entry points in series, many such operations may benefit from being called atomically from a single transaction. A script payload transaction can call any entry point or public function defined within any module. - -:::tip Read more -See the tutorial on [Your First Transaction](../tutorials/first-transaction.md) for generating valid transactions. -::: - -:::note Transaction generation -The Aptos REST API supports generating BCS-encoded transactions from JSON. This is useful for rapid prototyping, but be cautious using it in Mainnet as this places a lot of trust on the fullnode generating the transaction. -::: - -## States - -The Aptos blockchain's ledger state, or global state, represents the state of all accounts in the Aptos blockchain. Each validator node in the blockchain must know the latest version of the global state to execute any transaction. - -Anyone can submit a transaction to the Aptos blockchain to modify the ledger state. 
Upon execution of a transaction, a transaction output is generated. A transaction output contains zero or more operations to manipulate the ledger state called **write sets** emitting a vector of resulting events, the amount of gas consumed, and the executed transaction status. - -### Proofs - -The Aptos blockchain uses proof to verify the authenticity and correctness of the blockchain data. - -Data within the Aptos blockchain is replicated across the network. Each validator and fullnode's [storage](./validator-nodes#storage) is responsible for persisting the agreed upon blocks of transactions and their execution results to the database. - -The blockchain is represented as an ever-growing [Merkle tree](../reference/glossary.md#merkle-trees), where each leaf appended to the tree represents a single transaction executed by the blockchain. - -All operations executed by the blockchain and all account states can be verified cryptographically. These cryptographic proofs ensure that: -- The validator nodes agree on the state. -- The client does not need to trust the entity from which it is receiving data. For example, if a client fetches the last **n** transactions from an account, a proof can attest that no transactions were added, omitted or modified in the response. The client may also query for the state of an account, ask whether a specific transaction was processed, and so on. - -### Versioned database - -The ledger state is versioned using an unsigned 64-bit integer corresponding to the number of transactions the system has executed. This versioned database allows the validator nodes to: - -- Execute a transaction against the ledger state at the latest version. -- Respond to client queries about ledger history at both current and previous versions. - -## Transactions change ledger state - - - -The above figure shows how executing transaction T*i* changes the state of the Aptos blockchain from S*i-1* to S*i*. 
- -In the figure: - -- Accounts **A** and **B**: Represent Alice's and Bob's accounts on the Aptos blockchain. -- **S*i-1*** : Represents the (*i-1*)-the state of the blockchain. In this state, Alice's account **A** has a balance of 110 APT (Aptos coins), and Bob's account **B** has a balance of 52 APT. -- **T*i*** : This is the *i*-th transaction executed on the blockchain. In this example, it represents Alice sending 10 APT to Bob. -- **Apply()**: This is a deterministic function that always returns the same final state for a specific initial state and a specific transaction. If the current state of the blockchain is **S*i-1***, and transaction **T*i*** is executed on the state **S*i-1***, then the new state of the blockchain is always **S*i***. The Aptos blockchain uses the [Move language](../move/book/SUMMARY.md) to implement the deterministic execution function **Apply()**. -- **S*i*** : This is the *i*-the state of the blockchain. When the transaction **T*i*** is applied to the blockchain, it generates the new state **S*i*** (an outcome of applying **Apply(S*i-1*, T*i*)** to **S*i-1*** and **T*i***). This causes Alice’s account balance to be reduced by 10 to 100 APT and Bob’s account balance to be increased by 10 to 62 APT. The new state **S*i*** shows these updated balances. diff --git a/developer-docs-site/docs/concepts/validator-nodes.md b/developer-docs-site/docs/concepts/validator-nodes.md deleted file mode 100755 index 36f836a258624..0000000000000 --- a/developer-docs-site/docs/concepts/validator-nodes.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: "Validator Nodes Overview" -slug: "validator-nodes" ---- -import BlockQuote from "@site/src/components/BlockQuote"; - -An Aptos node is an entity of the Aptos ecosystem that tracks the state of the Aptos blockchain. Clients interact with the blockchain via Aptos nodes. 
There are two types of nodes: -* Validator nodes -* [Fullnodes](./fullnodes.md) - -Each Aptos node comprises several logical components: -* [REST service](../reference/glossary.md#rest-service) -* [Mempool](#mempool) -* [Consensus (disabled in fullnodes)](#consensus) -* [Execution](#execution) -* [Virtual Machine](#virtual-machine-vm) -* [Storage](#storage) -* [State synchronizer](#state-synchronizer) - -The [Aptos-core](../reference/glossary.md#aptos-core) software can be configured to run as a validator node or as a fullnode. - -# Overview - -When a transaction is submitted to the Aptos blockchain, validator nodes run a distributed [consensus protocol](../reference/glossary.md#consensus-protocol), execute the transaction, and store the transaction and the execution results on the blockchain. Validator nodes decide which transactions will be added to the blockchain and in which order. - -The Aptos blockchain uses a Byzantine Fault Tolerance (BFT) consensus protocol for validator nodes to agree on the ledger of finalized transactions and their execution results. Validator nodes process these transactions and include them in their local copy of the blockchain database. This means that up-to-date validator nodes always maintain a copy of the current [state](../reference/glossary.md#state) of the blockchain, locally. - -Validator nodes communicate directly with other validator nodes over a private network. [Fullnodes](./fullnodes.md) are an external validation and/or dissemination resource for the finalized transaction history. They receive transactions from peers and may re-execute them locally (the same way a validator executes transactions). Fullnodes store the results of re-executed transactions to local storage. In doing so, they can challenge any foul-play by validators and provide evidence if there is any attempt to re-write or modify the blockchain history. This helps to mitigate against validator corruption and/or collusion. - -
-The AptosBFT consensus protocol provides fault tolerance of up to one-third of malicious validator nodes. -
- -## Validator node components - -![validator.svg](../../static/img/docs/validator.svg) -### Mempool - -Mempool is a component within each node that holds an in-memory buffer of transactions that have been submitted to the blockchain, but not yet agreed upon or executed. This buffer is replicated between validator nodes and fullnodes. - -The JSON-RPC service of a fullnode sends transactions to a validator node's mempool. Mempool performs various checks on the transactions to ensure transaction validity and protect against DOS attacks. When a new transaction passes initial verification and is added to mempool, it is then distributed to the mempools of other validator nodes in the network. - -When a validator node temporarily becomes a leader in the consensus protocol, consensus pulls the transactions from mempool and proposes a new transaction block. This block is broadcasted to other validators and contains a total ordering over all transactions in the block. Each validator then executes the block and submits votes on whether or not to accept the new block proposal. - -### Consensus - -Consensus is the component that is responsible for ordering blocks of transactions and agreeing on the results of execution by participating in the consensus protocol with other validator nodes in the network. - -### Execution - -Execution is the component that coordinates the execution of a block of transactions and maintains a transient state. Consensus votes on this transient state. Execution maintains an in-memory representation of the execution results until consensus commits the block to the distributed database. Execution uses the virtual machine to execute transactions. Execution acts as the glue layer between the inputs of the system (represented by transactions), storage (providing a persistency layer), and the virtual machine (for execution). 
- -### Virtual machine (VM) - -The virtual machine (VM) is used to run the Move program within each transaction and determine execution results. A node's mempool uses the VM to perform verification checks on transactions, while execution uses the VM to execute transactions. - -### Storage - -The storage component is used to persist agreed upon blocks of transactions and their execution results to the local database. - -### State synchronizer - -Nodes use their state synchronizer component to “catch up” to the latest state of the blockchain and stay up-to-date. diff --git a/developer-docs-site/docs/guides/_category_.json b/developer-docs-site/docs/guides/_category_.json deleted file mode 100644 index e79050872b49e..0000000000000 --- a/developer-docs-site/docs/guides/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Guides", - "position": 2 -} diff --git a/developer-docs-site/docs/guides/account-management/key-rotation.md b/developer-docs-site/docs/guides/account-management/key-rotation.md deleted file mode 100644 index a0f5e2493ac00..0000000000000 --- a/developer-docs-site/docs/guides/account-management/key-rotation.md +++ /dev/null @@ -1,141 +0,0 @@ ---- -title: "Rotating an authentication key" -id: "key-rotation" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -Aptos Move accounts have a public address, an authentication key, a public key, and a private key. The public address is permanent, always matching the account's initial authentication key. - -The Aptos account model facilitates the unique ability to rotate an account's private key. Since an account's address is the *initial* authentication key, the ability to sign for an account can be transferred to another private key without changing its public address. - -In this guide, we show examples for how to rotate an account's authentication key using a few of the various Aptos SDKs. 
- -Here are the installation links for the SDKs we will cover in this example: - -* [Aptos CLI](../../tools/aptos-cli) -* [Typescript SDK](../../sdks/ts-sdk/index) -* [Python SDK](../../sdks/python-sdk) - -:::warning -Some of the following examples use private keys. Do not share your private keys with anyone. -::: - -## How to rotate an account's authentication key - - - -Run the following to initialize two test profiles. Leave the inputs blank both times you're prompted for a private key. - -```shell title="Initialize two test profiles on devnet" -aptos init --profile test_profile_1 --network devnet --assume-yes -aptos init --profile test_profile_2 --network devnet --assume-yes -``` -```shell title="Rotate the authentication key for test_profile_1 to test_profile_2's authentication key" -aptos account rotate-key --profile test_profile_1 --new-private-key -``` -:::info Where do I view the private key for a profile? -Public, private, and authentication keys for Aptos CLI profiles are stored in `~/.aptos/config.yaml` if your config is set to `Global` and `/.aptos/config.yaml` if it's set to `Workspace`. - -To see your config settings, run `aptos config show-global-config`. -::: - -```shell title="Confirm yes and create a new profile so that you can continue to sign for the resource account" -Do you want to submit a transaction for a range of [52000 - 78000] Octas at a gas unit price of 100 Octas? [yes/no] > -yes -... - -Do you want to create a profile for the new key? [yes/no] > -yes -... - -Enter the name for the profile -test_profile_1_rotated - -Profile test_profile_1_rotated is saved. -``` -You can now use the profile like any other account. - -In your `config.yaml` file, `test_profile_1_rotated` will retain its original public address but have a new public and private key that matches `test_profile_2`. 
- -The authentication keys aren't shown in the `config.yaml` file, but we can verify the change with the following commands: - -```shell title="Verify the authentication keys are now equal with view functions" -# View the authentication key of `test_profile_1_rotated` -aptos move view --function-id 0x1::account::get_authentication_key --args address:test_profile_1_rotated - -# View the authentication key of `test_profile_2`, it should equal the above. -aptos move view --function-id 0x1::account::get_authentication_key --args address:test_profile_2 -``` - -```json title="Example output from the previous two commands" -{ - "Result": [ - "0x458fba533b84717c91897cab05047c1dd7ac2ea73e75c77281781f5b7fec180c" - ] -} -{ - "Result": [ - "0x458fba533b84717c91897cab05047c1dd7ac2ea73e75c77281781f5b7fec180c" - ] -} -``` - - - - -This program creates two accounts on devnet, Alice and Bob, funds them, then rotates the Alice's authentication key to that of Bob's. - -View the full example for this code [here](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk/examples/typescript/rotate_key.ts). - -The function to rotate is very simple: -```typescript title="Typescript SDK rotate authentication key function" -:!: static/sdks/typescript/examples/typescript-esm/rotate_key.ts rotate_key -``` -Commands to run the example script: -```shell title="Navigate to the typescript SDK directory, install dependencies and run rotate_key.ts" -cd ~/aptos-core/ecosystem/typescript/sdk/examples/typescript-esm -pnpm install && pnpm rotate_key -``` -```shell title="rotate_key.ts output" -Account Address Auth Key Private Key Public Key ------------------------------------------------------------------------------------------------- -Alice 0x213d...031013 '0x213d...031013' '0x00a4...b2887b' '0x859e...08d2a9' -Bob 0x1c06...ac3bb3 0x1c06...ac3bb3 0xf2be...9486aa 0xbbc1...abb808 - -...rotating... 
- -Alice 0x213d...031013 '0x1c06...ac3bb3' '0xf2be...9486aa' '0xbbc1...abb808' -Bob 0x1c06...ac3bb3 0x1c06...ac3bb3 0xf2be...9486aa 0xbbc1...abb808 -``` - - - -This program creates two accounts on devnet, Alice and Bob, funds them, then rotates the Alice's authentication key to that of Bob's. - -View the full example for this code [here](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/python/sdk/examples/rotate-key.py). - -Here's the relevant code that rotates Alice's keys to Bob's: -```python title="Python SDK rotate authentication key function" -:!: static/sdks/python/examples/rotate_key.py rotate_key -``` -Commands to run the example script: -```shell title="Navigate to the python SDK directory, install dependencies and run rotate_key.ts" -cd ~/aptos-core/ecosystem/python/sdk -poetry install && poetry run python -m examples.rotate-key -``` -```shell title="rotate_key.py output" -Account Address Auth Key Private Key Public Key ------------------------------------------------------------------------------------------------- -Alice 0x213d...031013 '0x213d...031013' '0x00a4...b2887b' '0x859e...08d2a9' -Bob 0x1c06...ac3bb3 0x1c06...ac3bb3 0xf2be...9486aa 0xbbc1...abb808 - -...rotating... - -Alice 0x213d...031013 '0x1c06...ac3bb3' '0xf2be...9486aa' '0xbbc1...abb808' -Bob 0x1c06...ac3bb3 0x1c06...ac3bb3 0xf2be...9486aa 0xbbc1...abb808 -``` - - - diff --git a/developer-docs-site/docs/guides/building-from-source.md b/developer-docs-site/docs/guides/building-from-source.md deleted file mode 100644 index fdfd7a46ee141..0000000000000 --- a/developer-docs-site/docs/guides/building-from-source.md +++ /dev/null @@ -1,176 +0,0 @@ ---- -title: "Building Aptos From Source" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Building Aptos From Source - -[Binary releases are available](../tools/aptos-cli/install-cli/index.md), but if you want to build from source or develop on the Aptos tools, this is how. 
-
-## Supported operating systems
-
-Aptos can be built on various operating systems, including Linux, macOS, and Windows. Aptos is tested extensively on Linux and macOS, and less so on Windows. Here are the versions we use:
-
-* Linux - Ubuntu version 20.04 and 22.04
-* macOS - macOS Monterey and later
-* Microsoft Windows - Windows 10, 11 and Windows Server 2022+
-
-## Clone the Aptos-core repo
-
-
-1. Install [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git). Git is required to clone the aptos-core repo, and will need to be installed prior to continuing. You can install it with the instructions on the official [Git website](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git).
-
-1. Clone the Aptos repository. To clone the Aptos repository (repo), you first need to open a command line prompt (Terminal on Mac / Linux, PowerShell on Windows). Then run the following command to clone the Git repository from GitHub.
-
-   ```
-   git clone https://github.com/aptos-labs/aptos-core.git
-   ```
-
-1. Now let's go into the newly created directory `aptos-core` by *changing directory* or `cd`ing into it:
-   ```
-   cd aptos-core
-   ```
-
-### (Optional) Check out release branch
-
-Optionally, check out a release branch to install an Aptos node. We suggest you check out `devnet` for your first development. See [Choose a network](./system-integrators-guide.md#choose-a-network) for an explanation of their differences.
-
-Release Branches - - - - git checkout --track origin/devnet - - - - - git checkout --track origin/testnet - - - - - git checkout --track origin/mainnet - - - -
- -## Set up build dependencies - -Prepare your developer environment by installing the dependencies needed to build, test and inspect Aptos Core. -No matter your selected mechanism for installing these dependencies, **it is imperative you keep your entire toolchain up-to-date**. If you encounter issues later, update all packages and try again. - -
-macOS - -**> Using the automated script** - -1. Ensure you have `brew` package manager installed: https://brew.sh/ -1. Run the dev setup script to prepare your environment: `./scripts/dev_setup.sh` -1. Update your current shell environment: `source ~/.cargo/env`. - -:::tip -You can see the available options for the script by running `./scripts/dev_setup.sh --help` -::: - -**> Manual installation of dependencies** - -If the script above doesn't work for you, you can install these manually, but it's **not recommended**. - -1. [Rust](https://www.rust-lang.org/tools/install) -1. [CMake](https://cmake.org/download/) -1. [LLVM](https://releases.llvm.org/) -1. [LLD](https://lld.llvm.org/) - -
- -
-Linux - -**> Using the automated script** - -1. Run the dev setup script to prepare your environment: `./scripts/dev_setup.sh` -1. Update your current shell environment: `source ~/.cargo/env` - -:::tip -You can see the available options for the script by running `./scripts/dev_setup.sh --help` -::: - -**> Manual installation of dependencies** - -If the script above does not work for you, you can install these manually, but it is **not recommended**: - -1. [Rust](https://www.rust-lang.org/tools/install). -1. [CMake](https://cmake.org/download/). -1. [LLVM](https://releases.llvm.org/). -1. [libssl-dev](https://packages.ubuntu.com/jammy/libssl-dev) and [libclang-dev](https://packages.ubuntu.com/jammy/libclang-dev) - - -
- -
-Windows - -**> Using the automated script** - -1. Open a PowerShell terminal as an administrator. -1. Run the dev setup script to prepare your environment: `PowerShell -ExecutionPolicy Bypass -File ./scripts/windows_dev_setup.ps1` - -**> Manual installation of dependencies** - -1. Install [Rust](https://www.rust-lang.org/tools/install). -1. Install [LLVM](https://releases.llvm.org/). Visit their GitHub repository for the [latest prebuilt release](https://github.com/llvm/llvm-project/releases/tag/llvmorg-15.0.7). -1. Install [Microsoft Visual Studio Build Tools for Windows](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2022). During setup, select "Desktop development with C++" and three additional options: MSVC C++ build tools, Windows 10/11 SDK, and C++ CMake tools for Windows. -1. If on Windows ARM, install [Visual Studio](https://visualstudio.microsoft.com/vs). -1. If not already installed during Visual Studio/Build Tools installation, install [CMake](https://cmake.org/download/). - -1. Open a new PowerShell terminal after installing all dependencies - -
- -### Additional Tools - -If you used `scripts/dev_setup.sh` for macOS or Linux setup, additional tools are optionally available. - -#### TypeScript -Typically only needed for _developing_ the TypeScript SDK. -[Using the released SDK can be achieved from npm/pnpm/yarn](../sdks/ts-sdk/index). -```bash -scripts/dev_setup.sh -J -``` - -#### PostgreSQL -Used in the Indexer. -```bash -scripts/dev_setup.sh -P -``` - -#### Move Prover Tools -```bash -scripts/dev_setup.sh -y -p -``` - - - -Now your basic Aptos development environment is ready. Head over to our [Developer Tutorials](../tutorials/index.md) to get started in Aptos. - -## Building Aptos - -The simplest check that you have a working environment is to build everything and run the tests. - -```bash -cargo build -cargo test -- --skip prover -``` - -If you installed the Move Prover Tools above then you don't need to skip the prover tests. - -Other documentation of specific tools has recommended patterns for `cargo build` and `cargo run`: - -* [Run a Local Development Network](../guides/local-development-network.md) -* [Indexer](../indexer/legacy/indexer-fullnode.md) -* [Node Health Checker](../nodes/measure/node-health-checker.md) -* [Running a Local Multinode Network](running-a-local-multi-node-network.md) diff --git a/developer-docs-site/docs/guides/data-pruning.md b/developer-docs-site/docs/guides/data-pruning.md deleted file mode 100644 index 95a5eb4825afc..0000000000000 --- a/developer-docs-site/docs/guides/data-pruning.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: "Data Pruning" -slug: "data-pruning" ---- - -# Data Pruning - -When a validator node is running, it participates in consensus to execute -transactions and commit new data to the blockchain. Similarly, when fullnodes -are running, they sync the new blockchain data through [state synchronization](../guides/state-sync.md). -As the blockchain grows, storage disk space can be managed by pruning old -blockchain data.
Specifically, by pruning the **ledger history**, which -contains old transactions. By default, ledger pruning is enabled on all -nodes with a pruning window that can be configured. This document describes -how you can configure the pruning behavior. - -:::note -By default, the ledger pruner keeps 150 million recent transactions. The approximate amount of disk space required for every 150M transactions is 200G. Unless -bootstrapped from the genesis and configured to disable the pruner or a long -prune window, the node doesn't carry the entirety of the ledger history. -The majority of the nodes on both the testnet and mainnet have a partial -history of 150 million transactions according to this configuration. -::: - - -To manage these settings, edit the node configuration YAML files, -for example, `fullnode.yaml` for fullnodes (validator or public) or -`validator.yaml` for validator nodes, as shown below. - -## Disabling the ledger pruner - -Add the following to the node configuration YAML file to disable the -ledger pruner: - -:::caution Proceed with caution -Disabling the ledger pruner can result in the storage disk filling up very quickly. -::: - -```yaml -storage: - storage_pruner_config: - ledger_pruner_config: - enable: false -``` - -## Configuring the ledger pruning window - -Add the following to the node configuration YAML file to make the node -retain, for example, 1 billion transactions and their outputs, including events -and write sets. - -:::caution Proceed with caution -Setting the pruning window smaller than 100 million can lead to runtime errors and damage the health of the node. -::: - -```yaml -storage: - storage_pruner_config: - ledger_pruner_config: - prune_window: 1000000000 -``` - -See the complete set of storage configuration settings in the [Storage README](https://github.com/aptos-labs/aptos-core/tree/main/storage#configs).
diff --git a/developer-docs-site/docs/guides/explore-aptos.md b/developer-docs-site/docs/guides/explore-aptos.md deleted file mode 100644 index 5c6438ef828c2..0000000000000 --- a/developer-docs-site/docs/guides/explore-aptos.md +++ /dev/null @@ -1,275 +0,0 @@ ---- -title: "Explore Aptos" -slug: "explore-aptos" ---- -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Use the Aptos Explorer - -The [Aptos Explorer](https://explorer.aptoslabs.com/) lets you delve into the activity on the Aptos blockchain in great detail, seeing transactions, validators, and account information. With the Aptos Explorer, you can ensure that the transactions performed on Aptos are accurately reflected. Note, the Aptos ecosystem has [several other explorers](https://github.com/aptos-foundation/ecosystem-projects#explorers) to choose from. - -The Aptos Explorer provides a one-step search engine across the blockchain to discover details about wallets, transactions, network analytics, user accounts, smart contracts, and more. The Aptos Explorer also offers dedicated pages for key elements of the blockchain and acts as the source of truth for all things Aptos. See the [Aptos Glossary](../reference/glossary.md) for definitions of many of the terms found here. - -## Users - -The Aptos Explorer gives you a near real-time view into the status of the network and the activity related to the core on-chain entities. It serves these audiences and purposes by letting: - -* App developers understand the behavior of the smart contracts and sender-receiver transaction flows. -* General users view and analyze Aptos blockchain activity on key entities - transactions, blocks, accounts, and resources. -* Node operators check the health of the network and maximize the value of operating the node. -* Token holders find the best node operator to delegate the tokens and earn a staking reward. 
- -## Common tasks - -Follow the instructions here to conduct typical work in the Aptos Explorer. - -### Select a network - -The Aptos Explorer renders data from all Aptos networks: Mainnet, Testnet, Devnet, and your local host if configured. See [Aptos Blockchain Networks](../nodes/networks.md) for a detailed view of their purposes and differences. - -To select a network in the [Aptos Explorer](https://explorer.aptoslabs.com/), load the explorer and use the *Select Network* drop-down menu at the top right to select your desired network. - -
- -
- -### Find a transaction - -One of the most common tasks is to track a transaction in Aptos Explorer. You may search by the account address, transaction version and hash, or block height and version. - -To find a transaction: - -1. Enter the value in the *Search transactions* field near the top of any page. -1. Do not press return. -1. Click the transaction result that appears immediately below the search field, highlighted in green within the following screenshot: - -
- -
- -The resulting [Transaction details](#transaction-details) page appears. - -### Find an account address - -The simplest way to find your address is to use the [Aptos Petra Wallet](https://petra.app/docs/use). - -Then simply append it to the following URL to load its details in the Aptos Explorer: -https://explorer.aptoslabs.com/account/ - -Like so: -https://explorer.aptoslabs.com/account/0x778bdeebb67d3914b181236c2f1f4acc0e561482fc265b9a5709488a97fb3303 - -See [Accounts](#accounts) for instructions on use. - -## Explorer pages - -This section walks you through the available screens in Aptos Explorer to help you find the information you need. - -### Explorer home - -The Aptos Explorer home page provides an immediate view into the total supply of Aptos coins, those that are now staked, transactions per second (TPS), and active validators on the network, as well as a rolling list of the latest transactions: - -
- -
- -Click the **Transactions** tab at the top or **View all Transactions** at the bottom to go to the [Transactions](#transactions) page. - -### Transactions - -The *Transactions* page displays all transactions on the Aptos blockchain in order, with the latest at the top of an ever-growing list. - -In the transactions list, single-click the **Hash** column to see and copy the hash for the transaction or double-click the hash to go directly to the transaction details for the hash. - -
- -
- -Otherwise, click anywhere else in the row of the desired transaction to load its [Transaction details](#transaction-details) page. - -Use the controls at the bottom of the list to navigate back through transactions historically. - -### Transaction details - -The *Transaction details* page reveals all information for a given transaction, starting with its default *Overview* tab. There you can see a transaction's status, sender, version, gas fee, and much more: - -
- -
- -Scrolling down on the Overview, you can also see the transaction's signature (with `public_key`) and hashes for tracking. - -The Transaction details page offers even more information in the following tabs. - -#### Events - -The Transaction details *Events* tab shows the transaction's [sequence numbers](../reference/glossary.md#sequence-number), including their types and data. - -#### Payload - -The Transaction details *Payload* tab presents the transaction's actual code used. Click the down arrow at the bottom of the code block to expand it and see all contents. - -#### Changes - -The Transaction details *Changes* tab shows the addresses, state key hashes, and data for each index in the transaction. - -### Accounts - -The *Accounts* page aggregates all transactions, tokens, and other resources in a single set of views starting with its default *Transactions* tab: - -
- -
- -You can load your account page by appending your account address to: -https://explorer.aptoslabs.com/account/ - -See [Find account address](#find-account-address) for more help. - -On the Accounts > Transactions tab, click any transaction to go to its [Transaction details](#transaction-details) page. - -As on the main [Transactions](#transactions) page, you may also single-click the **Hash** column to see and copy the hash for the transaction or double-click the hash to go directly to the transaction details for the hash. - -As with Transactions, the Aptos Explorer provides tabs for additional information about the account. - -#### Tokens - -The *Tokens* tab presents any assets owned by the account, as well as details about the tokens themselves (name, collection, and more). Click any of the assets to go to the [Token details](#token-details) page. - -#### Token details - -The *Token details* page contains: - - * *Overview* tab including token name, owner, collection, creator, royalty, and more. - * *Activities* tab showing all transfer types, the addresses involved, property version, and amount. - -
- -
- -On either tab, click an address to go to the *Account* page for the address. - -#### Resources - -The *Resources* tab presents a view of all types used by the account. Use the *Collapse All* toggle at top right to see all types at once. - -#### Modules - -The *Modules* tab displays the source code and ABI used by the account. Select different modules on the left sidebar to view Move source code and ABI of a specific module. Use the expand button at the top right of the source code to expand the code for better readability. - -
- -
- -#### Info - -The *Info* tab shows the [sequence number](../reference/glossary.md#sequence-number) and authentication key used by the account. - -### Blocks - -The *Blocks* page presents a running list of the latest blocks to be committed to the Aptos blockchain. - -
- -
- -Click the: - * Hash to see and copy the hash of the block. - * First version to go to the first transaction in the block. - * Last version to go to the last transaction in the block. - * Block ID or anywhere else to go to the [Block details](#block-details) page. - -### Block details - -The *Block details* page contains: - - * *Overview* tab including block height, versions, timestamp, proposer, epoch and round. - * *Transactions* tab showing the version, status, type, hash, gas, and timestamp. - -
- -
- - On the *Overview* tab, click the versions to go to the related transactions or double-click the address of the proposer to go to the *Account* page for that address. - - On the *Transactions* tab, click the desired row to go to the *Transactions details* page. - -### Validators - -The *Validators* page lists every validator on the Aptos blockchain, including their validator address, voting power, public key, fullnode address, and network address. - -
- -
- -Click the validator address to go to the *Account* page for that address. Click the public key or any of the other addresses to see and copy their values. diff --git a/developer-docs-site/docs/guides/local-development-network.md b/developer-docs-site/docs/guides/local-development-network.md deleted file mode 100644 index fc34b5187fd7f..0000000000000 --- a/developer-docs-site/docs/guides/local-development-network.md +++ /dev/null @@ -1,368 +0,0 @@ ---- -title: "Run a Local Development Network" ---- - -# Run a Local Development Network - -You can run the Aptos network locally. This local network will not be connected to any production Aptos network (e.g. mainnet), it will run on your local machine, independent of other Aptos networks. Building against a local network has a few advantages: -- **No ratelimits:** Hosted services (including the Node API, Indexer API, and faucet) are generally subject to ratelimits. Local development networks have no ratelimits. -- **Reproducibility:** When using a production network you might have to repeatedly make new accounts or rename Move modules to avoid incompatibility issues. With a local network you can just choose to start from scratch. -- **High availability:** The Aptos devnet and testnet networks are periodically upgraded, during which time they can be unavailable. The internet can also be unreliable sometimes. Local development networks are always available, even if you have no internet access. - -## Prerequisites -In order to run a local development network you must have the following installed: -- Aptos CLI: [Installation Guide](../tools/aptos-cli/install-cli/index.md). -- Docker: [Installation Guide](https://docs.docker.com/get-docker/). - - Docker Desktop is the strongly recommended installation method. - -:::tip -If you do not want to run an [Indexer API](../indexer/api/index.md) as part of your local network (`--with-indexer-api`) you do not need to install Docker. 
Note that without the Indexer API your local network will be incomplete compared to a production network. Many features in the downstream tooling will not work as expected / at all without this API available. -::: - -## Run a local network - -You can run a local network using the following Aptos CLI command: -```bash -aptos node run-local-testnet --with-indexer-api -``` - -**Note:** Despite the name (`local-testnet`), this has nothing to do with the Aptos testnet; it will run a network entirely local to your machine. - -You should expect to see output similar to this: -``` -Readiness endpoint: http://0.0.0.0:8070/ - -Indexer API is starting, please wait... -Node API is starting, please wait... -Transaction stream is starting, please wait... -Postgres is starting, please wait... -Faucet is starting, please wait... - -Completed generating configuration: - Log file: "/Users/dport/.aptos/testnet/validator.log" - Test dir: "/Users/dport/.aptos/testnet" - Aptos root key path: "/Users/dport/.aptos/testnet/mint.key" - Waypoint: 0:397412c0f96b10fa3daa24bfda962671c3c3ae484e2d67ed60534750e2311f3d - ChainId: 4 - REST API endpoint: http://0.0.0.0:8080 - Metrics endpoint: http://0.0.0.0:9101/metrics - Aptosnet fullnode network endpoint: /ip4/0.0.0.0/tcp/6181 - Indexer gRPC node stream endpoint: 0.0.0.0:50051 - -Aptos is running, press ctrl-c to exit - -Node API is ready. Endpoint: http://0.0.0.0:8080/ -Postgres is ready. Endpoint: postgres://postgres@127.0.0.1:5433/local_testnet -Transaction stream is ready. Endpoint: http://0.0.0.0:50051/ -Indexer API is ready. Endpoint: http://127.0.0.1:8090/ -Faucet is ready. Endpoint: http://127.0.0.1:8081/ - -Applying post startup steps... - -Setup is complete, you can now use the local testnet! -``` - -Once you see this final line, you know the local testnet is ready to use: -```
-``` - -As you can see from the output, once the local network is running, you have access to the following services: -- [Node API](../nodes/aptos-api-spec.md): This is a REST API that runs directly on the node. It enables core write functionality such as transaction submission and a limited set of read functionality, such as reading account resources or Move module information. -- [Indexer API](../indexer/api/index.md): This is a GraphQL API that provides rich read access to indexed blockchain data. If you click on the URL for the Indexer API above, by default http://127.0.0.1:8090, it will open the Hasura Console. This is a web UI that helps you query the Indexer GraphQL API. -- [Faucet](../reference/glossary#faucet): You can use this to create accounts and mint APT on your local network. -- [Transaction Stream Service](../indexer/txn-stream/index.md): This is a grpc stream of transactions. This is relevant to you if you are developing a [custom processor](../indexer/custom-processors/index.md). -- Postgres: This is the database that the indexer processors write to. The Indexer API reads from this database. - -## Using the local network - -### Configuring your Aptos CLI - -You can add a separate profile, as shown below: - -```bash -aptos init --profile local --network local -``` - -and you will get an output like below. At the `Enter your private key...` command prompt press enter to generate a random new key. - -```bash -Configuring for profile local -Using command line argument for rest URL http://localhost:8080/ -Using command line argument for faucet URL http://localhost:8081/ -Enter your private key as a hex literal (0x...) [Current: None | No input: Generate new key (or keep one if present)] -``` - -This will create and fund a new account, as shown below: - -```bash -No key given, generating key... 
-Account 7100C5295ED4F9F39DCC28D309654E291845984518307D3E2FE00AEA5F8CACC1 doesn't exist, creating it and funding it with 10000 coins -Aptos is now set up for account 7100C5295ED4F9F39DCC28D309654E291845984518307D3E2FE00AEA5F8CACC1! Run `aptos help` for more information about commands -{ - "Result": "Success" -} -``` - -From now on you should add `--profile local` to CLI commands to run them against the local network. - -### Configuring the TypeScript SDK - -In order to interact with the local network using the TypeScript SDK, use the local network URLs when building the client: -```typescript -import { Provider, Network } from "aptos"; - -const provider = new Provider(Network.LOCAL); -``` - -The provider is a single super client for both the node and indexer APIs. - -## Resetting the local network - -Sometimes while developing it is helpful to reset the local network back to its initial state: -- You made backwards incompatible changes to a Move module and you'd like to redeploy it without renaming it or using a new account. -- You are building a [custom indexer processor](../indexer/custom-processors/index.md) and would like to index using a fresh network. -- You want to clear all on chain state, e.g. accounts, objects, etc. - -To start with a brand new local network, use the `--force-restart` flag: -```bash -aptos node run-local-testnet --force-restart -``` - -It will then prompt you if you really want to restart the chain, to ensure that you do not delete your work by accident. - -```bash -Are you sure you want to delete the existing chain? [yes/no] > -``` - -If you do not want to be prompted, include `--assume-yes` as well: -```bash -aptos node run-local-testnet --force-restart --assume-yes -``` - -## FAQ - -### Where can I get more information about the run-local-testnet command? - -More CLI help can be found by running the command: - -```bash -aptos node run-local-testnet --help -``` - -It will provide information about each of the flags you can use. 
- - -### I'm getting the error `address already in use`, what can I do? - -If you're getting an error similar to this error: - -```bash -'panicked at 'error binding to 0.0.0.0:9101: error creating server listener: Address already in use (os error 48)' -``` - -This means one of the ports needed by the local network is already in use by another process. - -On Unix systems, you can run the following command to get the name and PID of the process using the port: - -```bash -lsof -i :8080 -``` - -You can then kill it like this: -```bash -kill $PID -``` - -### How do I change the ports certain services run on? - -You can find flags to configure this for each service in the CLI help output: -``` -aptos node run-local-testnet -h -``` - -The help output tells you which ports services use by default. - -### How do I opt out of running certain services? - -- Opt out of running a faucet with `--no-faucet`. -- Opt out of running a Transaction Stream Service with `--no-txn-stream`. - - -### How do I publish Move modules to the local testnet? - -If you set up a profile called `local` above, you can run any command by adding the `--profile local` flag. In this case, we also use `local` as the named address in the `HelloBlockchain` example. The CLI will replace `local` with the account address for that profile. - -```bash -aptos move publish --profile local --package-dir /opt/git/aptos-core/aptos-move/move-examples/hello_blockchain --named-addresses HelloBlockchain=local -``` - -### How do I see logs from the services? -In the output of the CLI you will see something like this: -``` -Test dir: "/Users/dport/.aptos/testnet" -``` - -The logs from each of the services can be found in here. There are directories for the logs for each service. For processor logs, see the `tokio-runtime` directory. - -### What if it says Docker is not available? -To run an Indexer API using `--with-indexer-api` you need to have Docker on your system. 
- -You might be seeing an error that looks like this: -``` -Unexpected error: Failed to apply pre run steps for Postgres: Docker is not available, confirm it is installed and running. On Linux you may need to use sudo -``` - -Make sure you have Docker 24+: -```bash -$ docker --version -Docker version 24.0.6, build ed223bc -``` - -Make sure the Docker daemon is running. If you see this error it means it is not running: -```bash -$ docker info -... -ERROR: Cannot connect to the Docker daemon at unix:///Users/dport/.docker/run/docker.sock. Is the docker daemon running? -``` - -Make sure the socket for connecting to Docker is present on your machine in the default location. For example on Unix systems this file should exist: -``` -/var/run/docker.sock -``` - -If it doesn't, open Docker Desktop and enable `Settings -> Advanced -> Allow the default Docker socket to be used`. - -Alternatively, you can find where it is like this: -``` -$ docker context inspect | grep Host - "Host": "unix:///Users/dport/.docker/run/docker.sock", -``` - -Then make a symlink to it in the expected location: -``` -sudo ln -s /Users/dport/.docker/run/docker.sock /var/run/docker.sock -``` - -Alternatively, run the CLI like this to tell it where the socket is: -``` -DEFAULT_SOCKET=/Users/dport/.docker/run/docker.sock aptos node run-local-testnet --with-indexer-api -``` - -Note: As mentioned above, if you're on Mac or Windows, we recommend you use Docker Desktop rather than installing Docker via a package manager (e.g. Homebrew or Choco). - -### The local network seems to hang on startup -If the CLI seems to sit there and do nothing when you are using `--with-indexer-api`, consider quitting and restarting Docker. Sometimes Docker gets in a bad state. Note that Docker is only required if you are using `--with-indexer-api`. - -### How do I use the Postgres on my host machine? -By default when using `--with-indexer-api` the CLI will run a Postgres instance in Docker. 
If you have Postgres running on your host machine and would like to use that instead, you can do so with the `--use-host-postgres` flag. There are also flags for specifying how it should connect to the host Postgres. Here is an example invocation: -```bash -aptos node run-local-testnet --with-indexer-api --use-host-postgres --postgres-user $USER -``` - -### How do I wait for the local network to come up programmatically? -When running the CLI interactively, you can see if the network is alive by waiting for this message: -``` -Setup is complete, you can now use the local testnet! -``` - -If you are writing a script and would like to wait for the local network to come up, you can make a GET request to `http://127.0.0.1:8070`. At first this will return [503](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/503). When it returns [200](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200) it means all the services are ready. - -You can inspect the response to see which services are ready. - -
-Example using curl -

- -```json -$ curl http://127.0.0.1:8070 | jq . -{ - "ready": [ - { - "Http": [ - "http://127.0.0.1:43236/", - "processor_default_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43240/", - "processor_token_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43242/", - "processor_user_transaction_processor" - ] - }, - { - "Postgres": "postgres://postgres@127.0.0.1:5433/local_testnet" - }, - { - "Http": [ - "http://127.0.0.1:8081/", - "Faucet" - ] - }, - { - "IndexerApiMetadata": "http://127.0.0.1:8090/" - }, - { - "Http": [ - "http://127.0.0.1:8090/", - "Indexer API" - ] - }, - { - "NodeApi": "http://0.0.0.0:8080/" - }, - { - "Http": [ - "http://127.0.0.1:43239/", - "processor_stake_processor" - ] - }, - { - "DataServiceGrpc": "http://0.0.0.0:50051/" - }, - { - "Http": [ - "http://127.0.0.1:43235/", - "processor_coin_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43237/", - "processor_events_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43234/", - "processor_account_transactions_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43241/", - "processor_token_v2_processor" - ] - }, - { - "Http": [ - "http://127.0.0.1:43238/", - "processor_fungible_asset_processor" - ] - } - ], - "not_ready": [] -} -``` - -

-
- -### How do I learn more about the Aptos CLI? -If you are new to the Aptos CLI see this comprehensive [Aptos CLI user documentation](../tools/aptos-cli/use-cli/use-aptos-cli.md). diff --git a/developer-docs-site/docs/guides/nfts/aptos-token-overview.md b/developer-docs-site/docs/guides/nfts/aptos-token-overview.md deleted file mode 100644 index 95d166d2ab149..0000000000000 --- a/developer-docs-site/docs/guides/nfts/aptos-token-overview.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -title: "Aptos Token Overview" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Token Standards - -The [Aptos Digital Asset Standard](../../standards/digital-asset.md) defines the canonical Nonfungible Token on Aptos. Aptos leverages composability to extend the digital asset standard with features like fungibility via the [Fungible Asset standard](../../standards/fungible-asset.md). The concept of composability comes from the underlying data model for these constructs: the [Move object](../../standards/aptos-object.md) data model. - -The rest of this document discusses how the Aptos token standards compare to the standards on Ethereum and Solana. - -## Data models - -To understand tokens, we begin by comparing the data models across different blockchains. - -### Ethereum - -Ethereum has two types of accounts: -* Externally-owned accounts which store a balance of Ether. -* Contract accounts which manage their underlying smart contracts and have an associated storage for persistent state, which can only be mutated by the associated contract. - -In order to create a new NFT collection, a creator must deploy their own contract to the blockchain, which in turn will create a collection and set of NFTs within its storage. - -### Solana - -Unlike Ethereum or Aptos where data and code co-exist, Solana stores data and programs in separate accounts. 
There are two types of accounts on the Solana blockchain: -* Executable accounts only store contract code -* Non-executable accounts store data associated with and owned by executable accounts. - -In order to create a new NFT collection, a creator calls an existing deployed program to populate a new collection and set of NFTs. - -### Aptos - -The [accounts](../../concepts/accounts.md) in Aptos store both smart contracts and data. Unlike Ethereum, the associated data of a smart contract is distributed across the space of all accounts in [resources](../../concepts/resources.md) within [accounts](../../concepts/accounts.md) or [objects](../../standards/aptos-object.md). For example, a collection and an NFT within that collection are stored in distinct objects at different addresses with the smart contract defining them at another address. A smart contract developer could also store data associated with the NFT and collection at the same address as the smart contract or in other objects. - -There are two means to create NFTs on Aptos: - -* The [no-code standard](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-22.md) allows creators to call into the contract to create new collections and tokens without deploying a new contract. -* Custom NFT contracts allow creators to customize their NFTs by extending the object model that can manage all aspects of their collection. - -Aptos strikes a balance between the customizability offered by Ethereum with the simplicity of creating new collections like Solana. - -Like Ethereum, Aptos requires indexing to determine the set of all NFTs owned by an account, while Solana has no need. - -## Token standard comparison - -The Fungible Token (FT) was initially introduced by [EIP-20](https://eips.ethereum.org/EIPS/eip-20), and Non-Fungible Token (NFT) was defined in [EIP-721](https://eips.ethereum.org/EIPS/eip-721). 
Later, [EIP-1155](https://eips.ethereum.org/EIPS/eip-1155) combined FT and NFT or even Semi-Fungible Token (SFT) into one standard. - -The Ethereum token standards require each token to deploy its own individual contract code to distinguish collections of tokens. The Solana account model enables another pattern where code can be reused so that one generic program operates on various data. To create a new token, you could create an account that can mint tokens and more accounts that can receive them. The mint account itself uniquely determines the token type instead of the contract account, and these are all passed as arguments to the one contract deployed to some executable account. - -The collection of Aptos token standards shares some similarities with Solana, especially how it covers FT, NFT and SFT into a common on-chain code. Instead of deploying a new smart contract for each new token, a creator calls a function in the contract with the necessary arguments. Depending on which function you call, the token contract will mint/transfer/burn/... tokens. - -### Token identification - -Aptos identifies a token by its `Address` or `ObjectId`, a location within global storage. Collections are stored at a location determined by the address of the creator and the name of the collection. - -In Ethereum, contracts are deployed on accounts determined by the account that is deploying the contract. NFTs are then stored as indexes into data tables within the contract. - -In Solana, NFT data is stored under a mint account, independent of the program account. - -### Token metadata - -An Aptos token has metadata in its `Token` resource with the data most commonly required by dapps to interact with tokens. Some examples include: -- `name`: The name of the token. It must be unique within a collection. -- `description`: The description of the token. -- `uri`: A URL pointer to off-chain data for more information about the token.
The asset could be media such as an image or video or more metadata in a JSON file. -- `collection`: A pointer to the ObjectId of the collection. - -Additional fields can be stored in creator-defined resources or the `PropertyMap` resource that defines a generalizable key-value map. - -In Ethereum, only a small portion of such properties are defined as methods, such as `name()`, `symbol()`, `decimals()`, `totalSupply()` of ERC-20; or `name()` and `symbol()` and `tokenURI()` of the optional metadata extension for ERC-721; ERC-1155 also has a similar method `uri()` in its own optional metadata extension. Token metadata is not standardized so that dapps have to take special treatment case by case. - -In Solana, the Token Metadata program offers a Metadata Account defining numerous metadata fields associated with a token as well, including `collection` which is defined in `TokenDataId` in Aptos. Solana, however, does not offer mutability for assets, unlike Aptos. Like Aptos, Token Metadata v1.1.0 offers an `attribute` container for customized properties. diff --git a/developer-docs-site/docs/guides/running-a-local-multi-node-network.md b/developer-docs-site/docs/guides/running-a-local-multi-node-network.md deleted file mode 100644 index 94dd274666672..0000000000000 --- a/developer-docs-site/docs/guides/running-a-local-multi-node-network.md +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: "Run a Local Multinode Network" -slug: "running-a-local-multi-node-network" ---- - -# Run a Local Multinode Network - -This guide describes how to run a local network with multiple validator nodes and validator fullnodes. You will use the [Aptos Forge CLI](https://github.com/aptos-labs/aptos-core/tree/main/testsuite/forge-cli/src) for this. - -:::tip Use only for test networks -The method described in this guide should be used only for test networks of multi-node local networks. Do not use this guide for deploying in production environments. 
Currently this is the only guide for multi-node networks. - -For deploying a local network with a single node, see [Run a Local Development Network with the CLI](../guides/local-development-network.md). -::: - -## Before you proceed - -This guide assumes you have done the steps in [Building Aptos From Source](building-from-source.md) - -## Running multiple validators - -To deploy multiple local validators, run: - -```bash -cargo run -p aptos-forge-cli \ - -- \ - --suite "run_forever" \ - --num-validators 4 test local-swarm -``` - -This will start a local network of 4 validators, each running in their own process. The network will run forever unless you manually terminate it. - -The terminal output will display the locations of the validator files (for example, the genesis files, logs, node configurations, etc.) and the commands that were run to start each node. The process id (PID) of each node and server addresses (e.g., REST APIs) are also displayed when it starts. For example, if you run the above command you should see: - -```bash -... -2022-09-01T15:41:27.228289Z [main] INFO crates/aptos-genesis/src/builder.rs:462 Building genesis with 4 validators. Directory of output: "/private/var/folders/dx/c0l2rrkn0656gfx6v5_dy_p80000gn/T/.tmpq9uPMJ" -... -2022-09-01T15:41:28.090606Z [main] INFO testsuite/forge/src/backend/local/swarm.rs:207 The root (or mint) key for the swarm is: 0xf9f... -... -2022-09-01T15:41:28.094800Z [main] INFO testsuite/forge/src/backend/local/node.rs:129 Started node 0 (PID: 78939) with command: ".../aptos-core/target/debug/aptos-node" "-f" "/private/var/folders/dx/c0l2rrkn0656gfx6v5_dy_p80000gn/T/.tmpq9uPMJ/0/node.yaml" -2022-09-01T15:41:28.094825Z [main] INFO testsuite/forge/src/backend/local/node.rs:137 Node 0: REST API is listening at: http://127.0.0.1:64566 -2022-09-01T15:41:28.094838Z [main] INFO testsuite/forge/src/backend/local/node.rs:142 Node 0: Inspection service is listening at http://127.0.0.1:64568 -... 
-``` - -Using the information from this output, you can stop a single node and restart -it. For example, to stop and restart the node `0`, execute the below commands: - -```bash -kill -9 -cargo run -p aptos-node \ - -- \ - -f -``` - -## Faucet and minting - -In order to mint coins in this test network you need to run a faucet. You can do that with this command: - -```bash -cargo run -p aptos-faucet-service -- run-simple --key --node-url -``` - -You can get the values above like this: -- `key`: When you started the swarm, there was output like this: `The root (or mint) key for the swarm is: 0xf9f...`. This is the `key`. -- `node_url`: When you started the swarm, there was output like this: `REST API is listening at: http://127.0.0.1:64566`. This is the `node_url`. - -The above command will run a faucet locally, listening on port `8081`. Using this faucet, you can then mint tokens to your test accounts, for example: - -```bash -curl -X POST http://127.0.0.1:8081/mint?amount=&pub_key= -``` - -As an alternative to using the faucet service, you may use the faucet CLI directly: -``` -cargo run -p aptos-faucet-cli -- --amount 10 --accounts --key -``` - -:::tip Faucet and Aptos CLI -See more on how the faucet works in the [README](https://github.com/aptos-labs/aptos-core/tree/main/crates/aptos-faucet). - -Also see how to use the [Aptos CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md#account-examples) with an existing faucet. -::: - -## Validator fullnodes - -To also run validator fullnodes inside the network, use the `--num-validator-fullnodes` flag. 
For example: -```bash -cargo run -p aptos-forge-cli \ - -- \ - --suite "run_forever" \ - --num-validators 3 \ - --num-validator-fullnodes 1 test local-swarm -``` - -## Additional usage - -To see all tool usage options, run: -```bash -cargo run -p aptos-forge-cli --help -``` diff --git a/developer-docs-site/docs/guides/sponsored-transactions.md b/developer-docs-site/docs/guides/sponsored-transactions.md deleted file mode 100644 index 04258b2293a1e..0000000000000 --- a/developer-docs-site/docs/guides/sponsored-transactions.md +++ /dev/null @@ -1,71 +0,0 @@ -# Sponsored Transactions - -As outlined in [AIP-39](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-39.md),sponsored transactions allow one account to pay the fees associated with executing a transaction for another account. Sponsored transactions simplify the process for onboarding users into applications by allowing the application to cover all associated fees for interacting with the Aptos blockchain. Here are two examples: -* [MerkleTrade](https://merkle.trade/) offers low cost trading to those with Ethereum wallets by creating an Aptos wallet for users and covering all transaction fees so that the user does not need to acquire utility tokens for Aptos. -* Community engagement applications like [Graffio](https://medium.com/aptoslabs/graffio-web3s-overnight-sensation-81a6cf18b626) offered to cover transaction fees for custodial accounts to support the collaborative drawing application for those without wallets. - -## Process Overview - -The process for sending a sponsored transaction follows: -* The sender of the transaction determines upon an operation, as defined by a `RawTransaction`. -* The sender generates a `RawTransactionWithData::MultiAgentWithFeePayer` structure - * Prior to the framework 1.8 release, this must contain the fee payer's address. - * After framework release 1.8, this can optionally be set to `0x0`. -* (Optionally) the sender aggregates signatures from other signers. 
-* The sender can forward the signed transaction to the fee payer to sign and forward it to the blockchain. -* Upon execution of the transaction, the sequence number of the sender account is incremented, all gas fees are deducted from the gas fee payer, and all refunds are sent to the gas fee payer. - -Alternatively, if the fee payer knows the operation and all signers involved, the fee payer could generate and sign the transaction and send it back to the other signers to sign. - -## Technical Details - -In Aptos, a sponsored transaction reuses the same SignedTransaction as any other user transaction: -```rust -pub struct SignedTransaction { - /// The raw transaction - raw_txn: RawTransaction, - - /// Public key and signature to authenticate - authenticator: TransactionAuthenticator, -} -``` - -The difference is in the `TransactionAuthenticator`, which stores the authorization from the fee payer of the transaction to extract utility fees from their account: -```rust -pub enum TransactionAuthenticator { -... - /// Optional Multi-agent transaction with a fee payer. - FeePayer { - sender: AccountAuthenticator, - secondary_signer_addresses: Vec, - secondary_signers: Vec, - fee_payer_address: AccountAddress, - fee_payer_signer: AccountAuthenticator, - }, -... -} -``` - -To prepare a sponsored transaction for an account, the account must first exist on-chain. This is a requirement that is being removed with the 1.8 framework release. - -As of the 1.8 framework release, an account does not need to exist on-chain. However, the first transaction for an account requires enough gas to not only execute the transaction and cover the costs associated with account creation, even if an account already exists. Future improvements to the account model intend to eliminate this requirement. - -During signing of the transaction, all parties sign the following: -```rust -pub enum RawTransactionWithData { -... 
- MultiAgentWithFeePayer { - raw_txn: RawTransaction, - secondary_signer_addresses: Vec, - fee_payer_address: AccountAddress, - }, -} -``` - -Prior to framework release 1.8, all signers were required to know the actual fee payer address prior to signing. As of framework release 1.8, signers can optionally set the address to `0x0` and only the fee payer must sign with their address set. - -## SDK Support - -Currently, there are two demonstrations of sponsored transactions: -* The Python SDK has an example in [fee_payer_transfer_coin.py](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/python/sdk/examples/fee_payer_transfer_coin.py). -* The Rust SDK has a test case in [the API tests](https://github.com/aptos-labs/aptos-core/blob/0a62e54e13bc5da604ceaf39efed5c012a292078/api/src/tests/transactions_test.rs#L255). diff --git a/developer-docs-site/docs/guides/state-sync.md b/developer-docs-site/docs/guides/state-sync.md deleted file mode 100644 index a6b6a4a62020e..0000000000000 --- a/developer-docs-site/docs/guides/state-sync.md +++ /dev/null @@ -1,181 +0,0 @@ ---- -title: "State Synchronization" -slug: "state-sync" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# State Synchronization - -Nodes in an Aptos network (e.g., validator nodes and fullnodes) must always be synchronized to the latest Aptos blockchain state. The [state synchronization](https://medium.com/aptoslabs/the-evolution-of-state-sync-the-path-to-100k-transactions-per-second-with-sub-second-latency-at-52e25a2c6f10) (state sync) component that runs on each node is responsible for this. State sync identifies and fetches new blockchain data from the peers, validates the data and persists it to the local storage. - -:::tip Need to start a node quickly? 
-If you need to start a node quickly, here's what we recommend by use case: - - **Devnet public fullnode**: To sync the entire blockchain history, use [intelligent syncing](state-sync.md#intelligent-syncing). Otherwise, use [fast sync](state-sync.md#fast-syncing). - - **Testnet public fullnode**: To sync the entire blockchain history, restore from a [backup](../nodes/full-node/aptos-db-restore.md). Otherwise, download [a snapshot](../nodes/full-node/bootstrap-fullnode.md) or use [fast sync](state-sync.md#fast-syncing). - - **Mainnet public fullnode**: To sync the entire blockchain history, restore from a [backup](../nodes/full-node/aptos-db-restore.md). Otherwise, use [fast sync](state-sync.md#fast-syncing). - - **Mainnet validator or validator fullnode**: To sync the entire blockchain history, restore from a [backup](../nodes/full-node/aptos-db-restore.md). Otherwise, use [fast sync](state-sync.md#fast-syncing). -::: - -## State sync modes - -State sync runs in two modes. All nodes will first bootstrap (in bootstrapping mode) on startup, and then continuously synchronize (in continuous sync mode). - -### Bootstrapping mode - -When the node starts, state sync will perform bootstrapping by using the specified bootstrapping mode configuration. This allows the node to catch up to the Aptos blockchain. There are several bootstrapping modes: - -- **Execute all the transactions since genesis**. In this state sync mode the node will retrieve from the Aptos network all the transactions since genesis, i.e., since the start of the blockchain's history, and re-execute those transactions. Naturally, this synchronization mode takes the longest amount of time. -- **Apply transaction outputs since genesis**. In this state sync mode the node will retrieve all the transactions since genesis but it will skip the transaction execution and will only apply the outputs of the transactions that were previously produced by validator execution. 
This mode reduces the amount of CPU time required. -- **(Default) Intelligent syncing since genesis**. In this state sync mode the node will retrieve all the transactions since genesis and will either execute the transactions, or apply the transaction outputs, depending on whichever is faster, per data chunk. This allows the node to adapt to CPU and network resource constraints more efficiently. This mode is the default mode. -- **Fast syncing**. In this state sync mode the node will skip the transaction history in the blockchain and will download only the latest blockchain state directly. As a result, the node will not have the historical transaction data, but it will be able to catch up to the Aptos network much more rapidly. - -### Continuous syncing mode - -After the node has bootstrapped and caught up to the Aptos network initially, state sync will then move into continuous syncing mode to stay up-to-date with the blockchain. There are several continuous syncing modes: - -- **Executing transactions**. This state sync mode will keep the node up-to-date by executing new transactions as they are committed to the blockchain. -- **Applying transaction outputs**. This state sync mode will keep the node up-to-date by skipping the transaction execution and only applying the outputs of the transactions as previously produced by validator execution. -- **(Default) Intelligent syncing**. This state sync mode will keep the node up-to-date by either executing the transactions, or applying the transaction outputs, depending on whichever is faster, per data chunk. This allows the node to adapt to CPU and network resource constraints more efficiently. This mode is the default mode. - -## Configuring the state sync modes - -The below sections provide instructions for how to configure your node for different use cases. 
- -### Executing all transactions - -To execute all the transactions since genesis and continue to execute new -transactions as they are committed, add the following to your node -configuration file (for example,`fullnode.yaml` or `validator.yaml`): - -```yaml - state_sync: - state_sync_driver: - bootstrapping_mode: ExecuteTransactionsFromGenesis - continuous_syncing_mode: ExecuteTransactions -``` - -:::tip Verify node syncing -While your node is syncing, you'll be able to see the -[`aptos_state_sync_version{type="synced"}`](../nodes/full-node/fullnode-source-code-or-docker.md#verify-initial-synchronization) metric gradually increase. -::: - -### Applying all transaction outputs - -To apply all transaction outputs since genesis and continue to apply new -transaction outputs as transactions are committed, add the following to your -node configuration file: - -```yaml - state_sync: - state_sync_driver: - bootstrapping_mode: ApplyTransactionOutputsFromGenesis - continuous_syncing_mode: ApplyTransactionOutputs -``` - -:::tip Verify node syncing -While your node is syncing, you'll be able to see the -[`aptos_state_sync_version{type="synced"}`](../nodes/full-node/fullnode-source-code-or-docker.md#verify-initial-synchronization) metric gradually increase. -::: - -### Intelligent syncing - -To execute or apply all transactions and outputs since genesis (and continue to -do the same as new transactions are committed), add the following to your node -configuration file: - -```yaml - state_sync: - state_sync_driver: - bootstrapping_mode: ExecuteOrApplyFromGenesis - continuous_syncing_mode: ExecuteTransactionsOrApplyOutputs -``` - -This is the default syncing mode on all nodes, as it allows the node to adapt to CPU and network resource constraints more efficiently. 
- -:::tip Verify node syncing -While your node is syncing, you'll be able to see the -[`aptos_state_sync_version{type="synced"}`](../nodes/full-node/fullnode-source-code-or-docker.md#verify-initial-synchronization) metric gradually increase. -::: - -### Fast syncing - -:::tip Fastest and cheapest method -This is the fastest and cheapest method of syncing your node. It -requires the node to start from an empty state (i.e., not have any existing -storage data). -::: - -:::caution Proceed with caution -Fast sync should only be used as a last resort for validators and -validator fullnodes. This is because fast sync skips all of the blockchain -history and as a result: (i) reduces the data availability in the network; -and (ii) may hinder validator consensus performance if too much data has -been skipped. Thus, validator and validator fullnode operators should be -careful to consider alternate ways of syncing before resorting to fast sync. -::: - -To download the latest blockchain state and continue to apply new -transaction outputs as transactions are committed, add the following to your -node configuration file: - -```yaml - state_sync: - state_sync_driver: - bootstrapping_mode: DownloadLatestStates - continuous_syncing_mode: ExecuteTransactionsOrApplyOutputs -``` - -While your node is syncing, you'll be able to see the -`aptos_state_sync_version{type="synced_states"}` metric gradually increase. -However, `aptos_state_sync_version{type="synced"}` will only increase once -the node has bootstrapped. This may take several hours depending on the -amount of data, network bandwidth and node resources available. - -**Note:** If `aptos_state_sync_version{type="synced_states"}` does not -increase then do the following: -1. Double-check the node configuration file has correctly been updated. -2. Make sure that the node is starting up with an empty storage database -(i.e., that it has not synced any state previously). 
- -## Running archival nodes - -To operate an archival node, which is a fullnode that contains all blockchain data -since the start of the blockchain's history (that is, genesis), you should: -1. Run a fullnode and configure it to either: (i) execute all transactions; (ii) apply all transaction outputs; or (iii) -use intelligent syncing (see above). Do not select fast syncing, as the fullnode will not contain all data since genesis. -2. Disable the ledger pruner, as described in the [Data Pruning document](data-pruning.md#disabling-the-ledger-pruner). -This will ensure that no data is pruned and the fullnode contains all blockchain data. - -:::caution Proceed with caution -Running and maintaining archival nodes is likely to be expensive and slow -as the amount of data being stored on the fullnode will continuously grow. -::: - - -## Security implications and data integrity -Each of the different syncing modes perform data integrity verifications to -ensure that the data being synced to the node has been correctly produced -and signed by the validators. This occurs slightly differently for -each syncing mode: -1. Executing transactions from genesis is the most secure syncing mode. It will -verify that all transactions since the beginning of time were correctly agreed -upon by consensus and that all transactions were correctly executed by the -validators. All resulting blockchain state will thus be re-verified by the -syncing node. -2. Applying transaction outputs from genesis is faster than executing all -transactions, but it requires that the syncing node trusts the validators to -have executed the transactions correctly. However, all other -blockchain state is still manually re-verified, e.g., consensus messages, -the transaction history and the state hashes are still verified. -3. Fast syncing skips the transaction history and downloads the latest -blockchain state before continuously syncing. 
To do this, it requires that the -syncing node trust the validators to have correctly agreed upon all -transactions in the transaction history as well as trust that all transactions -were correctly executed by the validators. However, all other blockchain state -is still manually re-verified, e.g., epoch changes and the resulting blockchain states. - -All of the syncing modes get their root of trust from the validator set -and cryptographic signatures from those validators over the blockchain data. -For more information about how this works, see the [state synchronization blogpost](https://medium.com/aptoslabs/the-evolution-of-state-sync-the-path-to-100k-transactions-per-second-with-sub-second-latency-at-52e25a2c6f10). diff --git a/developer-docs-site/docs/guides/system-integrators-guide.md b/developer-docs-site/docs/guides/system-integrators-guide.md deleted file mode 100644 index d243cf7883dc3..0000000000000 --- a/developer-docs-site/docs/guides/system-integrators-guide.md +++ /dev/null @@ -1,542 +0,0 @@ ---- -title: "Integrate with Aptos" -slug: "system-integrators-guide" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Integrate with the Aptos Blockchain - -If you provide blockchain services to your customers and wish to add the Aptos blockchain to your platform, then this guide is for you. This system integrators guide will walk you through all you need to integrate the Aptos blockchain into your platform. - -## Overview - -This document will guide you through the following tasks to integrate with Aptos: -1. Prepare an environment for testing. -1. Create an account on the blockchain. -1. Exchange account identifiers with another entity on the blockchain, for example, to perform swaps. -1. Create a transaction. -1. Obtain a gas estimate and validate the transaction for correctness. -1. Submit the transaction to the blockchain. -1. Wait for the outcome of the transaction. -1. 
Query historical transactions and interactions for a given account with a specific account, i.e., withdraws and deposits. - -## Getting Started - -In order to get started you'll need to select a network and pick your set of tools. There are also a handful of SDKs to help accelerate development. - -### Choose a network - -There are four well-supported networks for integrating with the Aptos blockchain: - -1. [Local testnet](http://127.0.0.1:8080) -- our standalone tool for local development against a known version of the codebase with no external network. -1. [Devnet](https://fullnode.devnet.aptoslabs.com/v1/spec#/) -- a shared resource for the community, data resets weekly, weekly update from aptos-core main branch. -1. [Testnet](https://fullnode.testnet.aptoslabs.com/v1/spec#/) -- a shared resource for the community, data will be preserved, network configuration will mimic Mainnet. -1. [Mainnet](https://fullnode.mainnet.aptoslabs.com/v1/spec#/) -- a production network with real assets. - -See [Aptos Blockchain Networks](../nodes/networks.md) for full details on each environment. - -### Run a local testnet - -There are two options for running a local testnet: -* [Install the Aptos CLI](../tools/aptos-cli/install-cli/index.md) and 2) run a [local development network](./local-development-network.md). This path is useful for developing on the Aptos blockchain, debugging Move contracts, and testing node operations. Using the CLI you will have a fully featured local development environment including a single node network, the node API, indexer API, and a faucet. -* Directly [run a local testnet](../nodes/local-testnet/run-a-local-testnet.md) using either the [Aptos-core source code](../nodes/local-testnet/run-a-local-testnet.md#using-the-aptos-core-source-code) or a [Docker image](../nodes/local-testnet/run-a-local-testnet.md#using-docker). 
These paths are useful for testing changes to the Aptos-core codebase or framework, or for building services on top of the Aptos blockchain, respectively. - -Either of these methods will expose a [REST API service](../apis/fullnode-rest-api.md) at `http://127.0.0.1:8080` and a Faucet API service at `http://127.0.0.1:8000` for option 1 run a local testnet or `http://127.0.0.1:8081` for option 2 install the Aptos CLI. The applications will output the location of the services. - -### Production network access - - - - - - - - - - - - - -### SDKs and tools - -Aptos currently provides three SDKs: -1. [Typescript](../sdks/new-ts-sdk/index.md) -2. [Python](../sdks/python-sdk.md) -3. [Rust](../sdks/rust-sdk.md) - -Almost all developers will benefit from exploring the CLI. [Using the CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md) demonstrates how the CLI can be used to which includes creating accounts, transferring coins, and publishing modules. - -## Accounts on Aptos - -An [account](../concepts/accounts.md) represents an entity on the Aptos blockchain that can send transactions. Each account is identified by a particular 32-byte account address and is a container for [Move modules and resources](../concepts/resources.md). On Aptos, accounts must be created on-chain prior to any blockchain operations involving that account. The Aptos framework supports implicitly creating accounts when transferring Aptos coin via [`aptos_account::transfer`](https://github.com/aptos-labs/aptos-core/blob/88c9aab3982c246f8aa75eb2caf8c8ab1dcab491/aptos-move/framework/aptos-framework/sources/aptos_account.move#L18) or explicitly via [`aptos_account::create_account`](https://github.com/aptos-labs/aptos-core/blob/88c9aab3982c246f8aa75eb2caf8c8ab1dcab491/aptos-move/framework/aptos-framework/sources/aptos_account.move#L13). 
- -At creation, an [Aptos account](https://github.com/aptos-labs/aptos-core/blob/88c9aab3982c246f8aa75eb2caf8c8ab1dcab491/aptos-move/framework/aptos-framework/sources/account.move#L23) contains: -* A [resource containing Aptos Coin](https://github.com/aptos-labs/aptos-core/blob/60751b5ed44984178c7163933da3d1b18ad80388/aptos-move/framework/aptos-framework/sources/coin.move#L50) and deposit and withdrawal of coins from that resource. -* An authentication key associated with their current public, private key(s). -* A strictly increasing [sequence number](../concepts/accounts.md#account-sequence-number) that represents the account's next transaction's sequence number to prevent replay attacks. -* A strictly increasing number that represents the next distinct GUID creation number. -* An [event handle](../concepts/events.md) for all new types of coins added to the account. -* An event handle for all key rotations for the account. - -Read more about [Accounts](../concepts/accounts.md) and [set one up](../tools/aptos-cli/use-cli/use-aptos-cli.md#initialize-local-configuration-and-create-an-account). - -## Transactions - -Aptos [transactions](../concepts/txns-states.md) are encoded in [Binary Canonical Serialization (BCS)](https://github.com/diem/bcs). Transactions contain information such as the sender’s account address, authentication from the sender, the desired operation to be performed on the Aptos blockchain, and the amount of gas the sender is willing to pay to execute the transaction. - -Read more in [Transactions and States](../concepts/txns-states.md). - -### Generating transactions - -Aptos supports two methods for constructing transactions: - -- Using the Aptos client libraries to generate native BCS transactions. -- Constructing JSON-encoded objects and interacting with the REST API to generate native transactions. - -The preferred approach is to directly generate native BCS transactions. 
Generating them via the REST API enables rapid development at the cost of trusting the fullnode to generate the transaction correctly. - -#### BCS-encoded transactions - -BCS-encoded transactions can be submitted to the `/transactions` endpoint but must specify `Content-Type: application/x.aptos.signed_transaction+bcs` in the HTTP headers. This will return a transaction submission result that, if successful, contains a transaction hash in the `hash` [field](https://github.com/aptos-labs/aptos-core/blob/9b85d41ed8ef4a61a9cd64f9de511654fcc02024/ecosystem/python/sdk/aptos_sdk/client.py#L138). - -#### JSON-encoded transactions - -JSON-encoded transactions can be generated via the [REST API](https://fullnode.devnet.aptoslabs.com/v1/spec#/), following these steps: - -1. First construct an appropriate JSON payload for the `/transactions/encode_submission` endpoint as demonstrated in the [Python SDK](https://github.com/aptos-labs/aptos-core/blob/b0fe7ea6687e9c180ebdbac8d8eb984d11d7e4d4/ecosystem/python/sdk/aptos_sdk/client.py#L128). -1. The output of the above contains an object containing a `message` that must be signed with the sender’s private key locally. -1. Extend the original JSON payload with the signature information and post it to the `/transactions` [endpoint](https://github.com/aptos-labs/aptos-core/blob/b0fe7ea6687e9c180ebdbac8d8eb984d11d7e4d4/ecosystem/python/sdk/aptos_sdk/client.py#L142). This will return a transaction submission result that, if successful, contains a transaction hash in the `hash` [field](https://github.com/aptos-labs/aptos-core/blob/b0fe7ea6687e9c180ebdbac8d8eb984d11d7e4d4/ecosystem/python/sdk/aptos_sdk/client.py#L145). - -JSON-encoded transactions allow for rapid development and support seamless ABI conversions of transaction arguments to native types. However, most system integrators prefer to generate transactions within their own tech stack. 
Both the [TypeScript SDK](https://github.com/aptos-labs/aptos-core/blob/9b85d41ed8ef4a61a9cd64f9de511654fcc02024/ecosystem/typescript/sdk/src/aptos_client.ts#L259) and [Python SDK](https://github.com/aptos-labs/aptos-core/blob/b0fe7ea6687e9c180ebdbac8d8eb984d11d7e4d4/ecosystem/python/sdk/aptos_sdk/client.py#L100) support generating BCS transactions. - -### Types of transactions - -Within a given transaction, the target of execution can be one of two types: - -- An entry point (formerly known as script function) -- A script (payload) - -Both [Python](https://github.com/aptos-labs/aptos-core/blob/3973311dac6bb9348bfc81cf983c2a1be11f1b48/ecosystem/python/sdk/aptos_sdk/client.py#L256) and [TypeScript](https://github.com/aptos-labs/aptos-core/blob/3973311dac6bb9348bfc81cf983c2a1be11f1b48/ecosystem/typescript/sdk/src/aptos_client.test.ts#L93) support the generation of transactions that target entry points. This guide points out many of those entry points, such as `aptos_account::transfer` and `aptos_account::create_account`. - -Most basic operations on the Aptos blockchain should be available via entry point calls. While one could submit multiple transactions calling entry points in series, such operations benefit from being called atomically from a single transaction. A script payload transaction can call any public (entry) function defined within any module. Here's an example [Move script](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/scripts/two_by_two_transfer) that uses a MultiAgent transaction to extract funds from two accounts and deposit them into two other accounts. This is a [Python example](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/python/sdk/examples/transfer_two_by_two.py) that uses the bytecode generated by compiling that script. Currently there is limited support for script payloads in TypeScript. 
- -### Status of a transaction - -Obtain transaction status by querying the API [`/transactions/by_hash/{hash}`](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_transaction_by_hash) with the hash returned during the submission of the transaction. - -A reasonable strategy for submitting transactions is to limit their lifetime to 30 to 60 seconds, and polling that API at regular intervals until success or several seconds after that time has elapsed. If there is no commitment on-chain, the transaction was likely discarded. - -### Testing transactions or transaction pre-execution - -To facilitate evaluation of transactions as well as gas estimation, Aptos supports a simulation API that does not require and should not contain valid signatures on transactions. - -The simulation API is a synchronous API that executes a transaction and returns the output inclusive of gas usage. The simulation API can be accessed by submitting a transaction to [`/transactions/simulate`](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/simulate_transaction). - -Both the [Typescript SDK](https://github.com/aptos-labs/aptos-ts-sdk/blob/main/src/api/transactionSubmission/simulate.ts) and [Python SDK](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/python/sdk/examples/simulate_transfer_coin.py) support the simulation API. Note the output and gas used may change based upon the state of the account. For gas estimations, we recommend that the maximum gas amount be larger than the amount quoted by this API. - -## Viewing current and historical state - -Most integrations into the Aptos blockchain benefit from a holistic and comprehensive overview of the current and historical state of the blockchain. Aptos provides historical transactions, state, and events, all the result of transaction execution. - -* Historical transactions specify the execution status, output, and tie to related events. 
Each transaction has a unique version number associated with it that dictates its global sequential ordering in the history of the blockchain ledger. -* The state is the representation of all transaction outputs up to a specific version. In other words, a state version is the accumulation of all transactions inclusive of that transaction version. -* As transactions execute, they may emit events. [Events](../concepts/events.md) are hints about changes in on-chain data. - -The storage service on a node employs two forms of pruning that erase data from nodes: - -* state -* events, transactions, and everything else - -While either of these may be disabled, storing the state versions is not particularly sustainable. - -Events and transactions pruning can be disabled via setting the [`enable_ledger_pruner`](https://github.com/aptos-labs/aptos-core/blob/cf0bc2e4031a843cdc0c04e70b3f7cd92666afcf/config/src/config/storage_config.rs#L141) to `false`. This is default behavior in Mainnet. In the near future, Aptos will provide indexers that mitigate the need to directly query from a node. - -The REST API offers querying transactions and events in these ways: - -* [Transactions for an account](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_account_transactions) -* [Transaction by version](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_transaction_by_version) -* [Events by event handle](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/get_events_by_event_handle) - -## Exchanging and tracking coins - -Aptos has a standard [Coin type](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move). Different types of coins can be represented in this type through the use of distinct structs that represent the type parameter or generic for `Coin`. - -Coins are stored within an account under the resource `CoinStore`. 
At account creation, each user has the resource `CoinStore<0x1::aptos_coin::AptosCoin>` or `CoinStore`, for short. Within this resource is the Aptos coin: `Coin`. - -### Transferring coins between users - -Coins, including APT, can be transferred between users via the [`aptos_account::transfer_coins`](https://github.com/aptos-labs/aptos-core/blob/d1610e1bb5214689a37a9cab59cf9254e8eb2be1/aptos-move/framework/aptos-framework/sources/aptos_account.move#L92) function for all coins and [`aptos_account::transfer`](https://github.com/aptos-labs/aptos-core/blob/88c9aab3982c246f8aa75eb2caf8c8ab1dcab491/aptos-move/framework/aptos-framework/sources/aptos_account.move#L18) for Aptos coins. - -:::caution -It is important to note that if an account has not registered a `CoinStore` for a given `T`, then any transfer of type `T` to that account will fail. -::: - -### Current balance for a coin - -The current balance for a `Coin` where `T` is the Aptos coin is available at the account resources URL: `https://{rest_api_server}/accounts/{address}/resource/0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>`. The balance is stored within `coin::amount`. The resource also contains the total number of deposit and withdraw events, and the `counter` value within `deposit_events` and `withdraw_events`, respectively. - -``` -{ - "type": "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", - "data": { - "coin": { - "value": "3927" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xcb2f940705c44ba110cd3b4f6540c96f2634938bd5f2aabd6946abf12ed88457", - "creation_num": "2" - } - } - }, - "withdraw_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xcb2f940705c44ba110cd3b4f6540c96f2634938bd5f2aabd6946abf12ed88457", - "creation_num": "3" - } - } - } - } -} -``` - -### Querying transactions - -In Aptos, each transaction is committed as a distinct version to the blockchain. 
This allows for the convenience of sharing committed transactions by their version number; to do so, query: `https://{rest_server_api}/transactions/by_version/{version}` - -Transactions submitted by an account can also be queried via the following URL where the `sequence_number` matches the sequence number of the transaction: `https://{rest_server_api}/account/{address}/transactions?start={sequence_number}&limit=1` - -A transfer transaction would appear as follows: - -``` -{ - "version": "13629679", - "gas_used": "4", - "success": true, - "vm_status": "Executed successfully", - "changes": [ - { - "address": "0xb258b91eee04111039320a85b0c24a2dd433909e14a6b5c32ee722e0fdecfddc", - "data": { - "type": "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", - "data": { - "coin": { - "value": "1000" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "creation_num": "2", - } - } - }, - ... - } - }, - "type": "write_resource" - }, - ... 
- ], - "sender": "0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "sequence_number": "0", - "max_gas_amount": "2000", - "gas_unit_price": "1", - "expiration_timestamp_secs": "1660616127", - "payload": { - "function": "0x1::aptos_account::transfer", - "arguments": [ - "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "1000" - ], - "type": "entry_function_payload" - }, - "events": [ - { - "key": "0x0300000000000000810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "guid": { - "id": { - "addr": "0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "creation_num": "3" - } - } - }, - "sequence_number": "0", - "type": "0x1::coin::WithdrawEvent", - "data": { - "amount": "1000" - } - }, - { - "key": "0x02000000000000005098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - guid": { - "id": { - "addr": "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "creation_num": "2" - } - } - }, - "sequence_number": "0", - "type": "0x1::coin::DepositEvent", - "data": { - "amount": "1000" - } - } - ], - "timestamp": "1660615531147935", - "type": "user_transaction" -} - -``` - -Here is a breakdown of the information in a transaction: -* `version` indicates the globally unique identifier for this transaction, its ordered position in all the committed transactions on the blockchain -* `sender` is the account address of the entity that submitted the transaction -* `gas_used` is the units paid for executing the transaction -* `success` and `vm_status` indicate whether or not the transaction successfully executed and any reasons why it might not have -* `changes` include the final values for any state resources that have been modified during the execution of the transaction -* `events` contain all the events emitted during the transaction execution -* `timestamp` is the near real-time timestamp of the transaction's execution - -If `success` is false, then `vm_status` will 
contain an error code or message that resulted in the transaction failing to succeed. When `success` is false, `changes` will be limited to gas deducted from the account and the sequence number incrementing. There will be no `events`. - -Each event in `events` is differentiated by a `key`. The `key` is derived from the `guid` in `changes`. Specifically, the `key` is a 40-byte hex string where the first eight bytes (or 16 characters) are the little endian representation of the `creation_num` in the `guid` of the `changes` event, and the remaining characters are the account address. - -As events do not dictate what emitted them, it is imperative to track the path in `changes` to determine the source of an event. In particular, each `CoinStore` has both a `WithdrawEvent` and a `DepositEvent`, based upon the type of coin. In order to determine which coin type is used in a transaction, an indexer can compare the `guid::creation_num` in a `changes` event combined with the address to the `key` for events in `events`. - -Using the above example, `events[1].guid` is equivalent to `changes[0].data.data.deposit_events.guid`, which is `{"addr": "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", "creation_num": "2"}`. - -:::tip -The `key` field will be going away in favor of `guid` -::: - -### Querying events - -Aptos provides clear and canonical events for all withdraw and deposit of coins. This can be used in coordination with the associated transactions to present to a user the change of their account balance over time, when that happened, and what caused it. With some amount of additional parsing, metadata such as the transaction type and the other parties involved can also be shared. 
- -Query events by handle URL: `https://{rest_api_server}/accounts/{address}/events/0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>/withdraw_events` - -``` -[ - { - "version":"13629679", - "key": "0x0300000000000000cb2f940705c44ba110cd3b4f6540c96f2634938bd5f2aabd6946abf12ed88457", - "guid": { - "id": { - "addr": "0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "creation_num": "3" - } - } - }, - "sequence_number": "0", - "type": "0x1::coin::WithdrawEvent", - "data": { - "amount": "1000" - } - } -] -``` - -Gather more information from the transaction that generated the event by querying `https://{rest_server_api}/transactions/by_version/{version}` where `{version}` is the same value as the `{version}` in the event query. - -:::tip - -When tracking full movement of coins, normally events are sufficient. `0x1::aptos_coin::AptosCoin`, however, requires considering `gas_used` for each transaction sent from the given account since it represents gas in Aptos. To reduce unnecessary overhead, extracting gas fees due to transactions does not emit an event. All transactions for an account can be retrieved from this API: `https://{rest_server_api}/accounts/{address}/transactions` - -::: - -### Tracking coin balance changes - -Consider the transaction from the earlier section, but now with an arbitrary coin `0x1337::my_coin::MyCoin` and some gas parameters changed: -``` -{ - "version": "13629679", - "gas_used": "20", - "success": true, - "vm_status": "Executed successfully", - "changes": [ - { - "address": "0xb258b91eee04111039320a85b0c24a2dd433909e14a6b5c32ee722e0fdecfddc", - "data": { - "type": "0x1::coin::CoinStore<0x1337::my_coin::MyCoin>", - "data": { - "coin": { - "value": "1000" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "creation_num": "2", - } - } - }, - ... - } - }, - "type": "write_resource" - }, - ... 
- ], - "sender": "0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "sequence_number": "0", - "max_gas_amount": "2000", - "gas_unit_price": "110", - "expiration_timestamp_secs": "1660616127", - "payload": { - "function": "0x1::aptos_account::transfer_coins", - "type_arguments": [ - "0x1337::my_coin::MyCoin" - ], - "arguments": [ - "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "1000" - ], - "type": "entry_function_payload" - }, - "events": [ - { - "key": "0x0300000000000000810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "guid": { - "id": { - "addr": "0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b", - "creation_num": "3" - } - } - }, - "sequence_number": "0", - "type": "0x1::coin::WithdrawEvent", - "data": { - "amount": "1000" - } - }, - { - "key": "0x02000000000000005098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - guid": { - "id": { - "addr": "0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e", - "creation_num": "2" - } - } - }, - "sequence_number": "0", - "type": "0x1::coin::DepositEvent", - "data": { - "amount": "1000" - } - } - ], - "timestamp": "1660615531147935", - "type": "user_transaction" -} -``` - -There are three balance changes in this transaction: -1. A withdrawal of `1000` of `0x1337::my_coin::MyCoin` from the transaction sending account `0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b` -2. A deposit of `1000` of `0x1337::my_coin::MyCoin` to receiving account `0x5098df8e7969b58ab3bd2d440c6203f64c60a1fd5c08b9d4abe6ae4216246c3e` -3. A gas fee `2200` of `0x1::aptos_coin::AptosCoin` from the sending account `0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b` - -To retrieve the withdrawal information: -1. Scan the `changes` for `0x1::coin::CoinStore`. Note the `CoinType` is a generic signifying which coin is stored in the store. In this example, the `CoinType` is `0x1337::my_coin::MyCoin`. -2. 
Retrieve the `guid` for `withdraw_events`. In this example, the `guid` contains `addr` `0x810026ca8291dd88b5b30a1d3ca2edd683d33d06c4a7f7c451d96f6d47bc5e8b` and `creation_num` `3`. -3. Scan for events with this `guid` and extract the event associated with it. In this example, it is the `0x1::coin::WithdrawEvent`. -4. Note the `amount` field will be the number of `CoinType` removed from the account in the `guid`. In this example, it is `1000`. - -To retrieve the deposit information, it's the same as withdrawal except: -1. The `guid` used is under `deposit_events` -2. The `amount` will be a positive increase on the account's balance. -3. The event's name will be: `0x1::coin::DepositEvent` - -To retrieve the gas fee: -1. The `gas_used` field must be multiplied by the `gas_unit_price`. In this example, `gas_used=20` and `gas_unit_price=110` so the total gas coins withdrawn is `2200`. -2. Gas is always: `0x1::aptos_coin::AptosCoin` - -To retrieve information about the number of decimals of the coin: -1. You can retrieve the number of decimals for a coin via its: `0x1::coin::CoinInfo` -2. This will be located at the address of the coin type. In this example, you would need to look up `0x1::coin::CoinInfo<0x1337::my_coin::MyCoin>` at address `0x1337`. - -:::tip -If you always use the events in this manner, you won't miss any balance changes for an account. -By monitoring the events, you will find all balance changes in the `0x1::coin::CoinStore`: -1. Coin mints -2. Coin burns -3. Coin transfers -4. Staking coins -5. Withdrawing staked coins -6. Transfers not derived from `coin::transfer` - -::: - -To create some sample data to explore, conduct ["Your first transaction"](../tutorials/first-transaction.md). - -To learn more about coin creation, make ["Your First Coin"](../tutorials/first-coin.md). 
- -## Integrating with the faucet - -This tutorial is for SDK and wallet developers who want to integrate with the [Aptos Faucet](https://github.com/aptos-labs/aptos-core/tree/main/crates/aptos-faucet). If you are a dapp developer, you should access the faucet through an existing [SDK](../tutorials/first-transaction.md) or [CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md#initialize-local-configuration-and-create-an-account) instead. - -### Differences between devnet and testnet -What are the differences between devnet and testnet? Effectively none. In the past, the testnet faucet had a Captcha in front of it, making it unqueryable by normal means. This is no longer true. - -The endpoints for each faucet are: -- Devnet: https://faucet.devnet.aptoslabs.com -- Testnet: https://faucet.testnet.aptoslabs.com - -### Calling the faucet: JavaScript / TypeScript -If you are building a client in JavaScript or TypeScript, you should make use of the [@aptos-labs/aptos-faucet-client](https://www.npmjs.com/package/@aptos-labs/aptos-faucet-client) package. This client is generated based on the OpenAPI spec published by the faucet service. - -Example use: -```typescript -import { - AptosFaucetClient, - FundRequest, -} from "@aptos-labs/aptos-faucet-client"; - -async function callFaucet(amount: number, address: string): Promise { - const faucetClient = new AptosFaucetClient({BASE: "https://faucet.devnet.aptoslabs.com"}); - const request: FundRequest = { - amount, - address, - }; - const response = await faucetClient.fund({ requestBody: request }); - return response.txn_hashes; -} -``` - -### Calling the faucet: Other languages -If you are trying to call the faucet in other languages, you have two options: -1. Generate a client from the [OpenAPI spec](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-faucet/doc/spec.yaml). -2. Call the faucet on your own. 
- -For the latter, you will want to build a query similar to this: -``` -curl -X POST 'https://faucet.devnet.aptoslabs.com/mint?amount=10000&address=0xd0f523c9e73e6f3d68c16ae883a9febc616e484c4998a72d8899a1009e5a89d6' -``` - -This means mint 10000 OCTA to address `0xd0f523c9e73e6f3d68c16ae883a9febc616e484c4998a72d8899a1009e5a89d6`. diff --git a/developer-docs-site/docs/guides/transaction-management.md b/developer-docs-site/docs/guides/transaction-management.md deleted file mode 100644 index b3d1facf1b9bd..0000000000000 --- a/developer-docs-site/docs/guides/transaction-management.md +++ /dev/null @@ -1,107 +0,0 @@ -# Transaction Management - -This guide explains how to build a transaction management harness that can scale on the Aptos blockchain. - -## Background - -In Aptos, transactions are mapped back to an account in terms of the entity that signs or authorizes that transaction and provides an account-based sequence number. When the Aptos network receives a new transaction, several rules are followed with respect to this: - -- The transaction sent from an account must be authorized correctly by that account. -- The current time as defined by the most recent ledger update must be before the expiration timestamp of the transaction. -- The transaction's sequence number must be equal to or greater than the sequence number on-chain for that account. - -Once the initial node has accepted a transaction, the transaction makes its way through the system by an additional rule. If a transactions sequence number is higher than the current on-chain sequence number, it can only progress toward consensus if every node in the path has seen a transaction with the sequence number between the on-chain state and the current sequence number. - -Example: - -Alice owns an account whose current on-chain sequence number is 5. - -Alice submits a transaction to node Bob with sequence number 6. - -Bob the node accepts the transaction but does not forward it, because Bob has not seen 5. 
- -In order to make progress, Alice must either send Bob transaction number 5 or Bob must be notified from consensus that 5 was committed. In the latter case, Alice submitted the transaction through another node. - -Beyond this there are two remaining principles: - -- A single account can have at most 100 uncommitted transactions submitted to the blockchain. Any more than that and the transactions will be rejected. This can happen silently if Alice submits the first 100 to Bob the node and the next 100 to Carol the node. If both those nodes share a common upstream, then that upstream will accept Alice's 100 sent via Bob but silently reject Alice's 100 sent via Carol. -- Submitting two distinct transactions to multiple nodes will result in slow resolution as transactions will not make progress from the submitted node until the submitting node knows that all preceding transactions have been committed. For example, if Alice sends the first 50 via Bob and the next 50 via Carol. - -## Building a Transaction Manager - -Now that we understand the nuances of transactions, let's dig into building a robust transaction manager. This consists of the following core components: - -- A sequence number generator that allocates and manages available sequence numbers for a single account. -- A transaction manager that receives payloads from an application or a user, sequence numbers from the sequence number generator, and has access to the account key to combine the three pieces together into a viable signed transaction. It then also takes the responsibility for pushing the transaction to the blockchain. -- An on-chain worker, leader harness that lets multiple accounts share the signer of a single shared account. - -Currently this framework assumes that the network builds no substantial queue, that is a transaction that is submitted executes and commits with little to no delay. 
In order to address high demand, this work needs to be extended with the following components: - -- Optimizing `base_gas_unit` price to ensure priority transactions can be committed to the blockchain. -- Further handling of transaction processing rates to ensure that the expiration timer is properly set. -- Handling of transaction failures to either be ignored or resubmitted based upon desired outcome. - -Note, an account should be managed by a single instance of the transaction manager. Otherwise each instance of the transaction manager will likely have stale in-memory state resulting in overlapping sequence numbers. - -### Implementations - -- Python - - [Sequence number manager](https://github.com/aptos-labs/aptos-core/pull/7987) - - [Transaction manager](https://github.com/aptos-labs/aptos-core/pull/7987) -- [Worker-leader smart contract](https://github.com/aptos-labs/aptos-core/pull/7986) - -### Managing Sequence Numbers - -Each transaction requires a distinct sequence number that is sequential to previously submitted transactions. This can be provided by the following process: - -1. At startup, query the blockchain for the account’s current sequence number. -2. Support up to 100 transactions in flight at the same time, that is 100 sequence numbers can be allocated without confirming that any have been committed. -3. If there are 100 transactions in flight, determine the actual committed state by querying the network. This will update the current sequence number. -4. If there are less than 100 transactions in flight, return to step 2. -5. Otherwise sleep for .1 seconds and continue to re-evaluate the current on-chain sequence number. -6. All transactions should have an expiration time. If the expiration time has passed, assume that there has been a failure and reset the sequence number. The trivial case is to only monitor for failures when the maximum number of transactions are in flight and to let other services manages this otherwise. 
- -In parallel, monitor new transactions submitted. Once the earliest transaction expiration time has expired, synchronize up to that transaction. Then repeat the process for the next transaction. - -If there is any failure, wait until all outstanding transactions have timed out and leave it to the application to decide how to proceed, e.g., replay failed transactions. The best method for waiting for outstanding transactions is first to query the ledger timestamp and ensure that at least the maximum timeout has elapsed since the last transaction's submit time. From there, validate with mempool that all transactions since the last known committed transaction are either committed or no longer exist within the mempool. This can be done by querying the REST API for transactions of a specific account, specifying the currently being evaluated sequence number and setting a limit to 1. Once these checks are complete, the local transaction number can be resynchronized. - -These failure handling steps are critical for the following reasons: -* Mempool does not immediately evict expired transactions. -* A new transaction cannot overwrite an existing transaction, even if it is expired. -* Consensus, i.e., the ledger timestamp, dictates expirations; the local node will only expire after it sees a committed timestamp after the transaction's expiration time and a garbage collection has happened. - -### Managing Transactions - -Once a transaction has been submitted it goes through a variety of steps: - -1. Submission to a REST endpoint. -2. Pre-execution validation in the Mempool during submission. -3. Transmission from Mempool to Mempool with pre-execution validation happening on each upstream node. -4. Inclusion in a consensus proposal. -5. One more pre-execution validation. -6. Execution and committing to storage. 
- -There are many potential failure cases that must be considered: - -- Failure during transaction submission (1 and 2): - - Visibility: The application will receive an error either that the network is unavailable or that the transaction failed pre-execution validation. - - If the error is related to availability or duplicate sequence numbers, wait until access is available and the sequence number has re-synchronized. - - Pre-execution validation failures are currently out of scope, outside of those related to duplicate sequence numbers, account issues are likely related to an invalid key for the account or the account lacks sufficient funds for gas. -- Failure between submission and execution (3, 4, and 5): - - Visibility: Only known by waiting until the transaction has expired. - - These are the same as other pre-execution validation errors due to changes to the account as earlier transactions execute. It is likely either duplicate sequence numbers or the account lacks sufficient funds for gas. -- Failure during execution (6): - - Visibility: These are committed to the blockchain. - - These errors occur as a result of on-chain state issues, these tend to be application specific, such as an auction where a new bid might not actually be higher than the current bid. - -### Workers and Identity - -Using the above framework, a single account can push upwards of 100 transactions from the start of a block to the end of a block. Assuming that all 100 transactions are consumed within 1 block, it will take a bit of time for the next 100 slots to be available. This is due to the network delays as well as the multi-staged validator pipeline. - -To fully leverage the blockchain for massive throughput, using a single user account is not enough. Instead, Aptos supports the concept of worker accounts that can share the responsibility of pushing work through a shared account, also known as a resource account. 
- -In this model, each worker has access to the `SignerCap` of the shared account, which enables them to impersonate the shared account or generate the `signer` for the shared account. Upon gaining the `signer`, the transaction can execute the logic that is gated by the signer of the shared account. - -Another model, if viable, is to decouple the `signer` entirely from permissions and to make an application-specific capability. Then this capability can be given to each worker that lets them operate on the shared infrastructure. - -Note that parallelization on the shared infrastructure can be limited if any transaction would have any read or write conflicts. This won’t prevent multiple transactions from executing within a block, but can impact maximum blockchain performance. diff --git a/developer-docs-site/docs/index.md b/developer-docs-site/docs/index.md deleted file mode 100644 index b786679e8d8e5..0000000000000 --- a/developer-docs-site/docs/index.md +++ /dev/null @@ -1,148 +0,0 @@ ---- -title: "Aptos Developer Documentation" -slug: "/" -hidden: false -sidebar_position: 0 -hide_table_of_contents: true ---- - -# Aptos Developer Documentation - -Welcome! Aptos is a Layer 1 for everyone. In the [Ohlone language](https://en.wikipedia.org/wiki/Ohlone_languages), ["Aptos"](https://en.wikipedia.org/wiki/Aptos,_California) means "The People." This site is here to help you grow a [web3 ecosystem project](https://github.com/aptos-foundation/ecosystem-projects) that benefits the entire world through easier development, more reliable services, faster transactions, and a supportive, decentralized family. - -This documentation will help you develop applications for the Aptos blockchain, run nodes, and be a part of the blossoming Aptos community. This documentation covers both basic and advanced topics. Here you will find concepts, how-to guides, quickstarts, tutorials, API references, code examples, release notes, and more. 
- -> Please note, this site is built from the `main` upstream branch of GitHub and so therefore reflects the latest changes to Aptos. If you have checked out [another branch](https://github.com/aptos-labs/aptos-core/branches) to use a [specific network](guides/system-integrators-guide.md#choose-a-network), the code may not yet have all of the features described here. - -## Find the latest releases - -See the newest Aptos releases in the [Latest Releases](./releases/index.md) list and its subpages. - -## Set up your environment and start with the tutorials - -
-
- -
-
-

Getting started

-

Begin by setting up your Aptos development environment.

-
-
-
-
-

Run quickstart tutorials

-

Submit your first transaction, write your first Move module, deploy your first coin, and more.

-
-
-
-
- -## Learn Aptos concepts and follow the guides - -
-
-
-
-

Concepts

-

Learn the key concepts of the Aptos blockchain, including transactions, accounts, gas and transaction fees, nodes, and more.

-
-
-
-
-

Move on Aptos

-

Learn how the Move programming language helps make the Aptos blockchain safe and reliable.

-
-
-
-
-

How it works

-

How to build applications on Aptos.

-
-
-
-
- -## Run an Aptos node - -
-
-
-
-

Run a validator node

-

Install and run a validator or a fullnode.

-
-
-
-
-

Run a local testnet

-

Run a validator node on your local testnet.

-
-
-
-
-

Run a public fullnode

-

Run a public fullnode and connect to devnet.

-
-
- -
-
- -## Develop apps with the SDKs and APIs - -
-
-
-
-
-

CLI

-

CLI for developing and deploying on the Aptos blockchain.

-
-
-
-
-
-
-

SDKs

-

SDKs for building on the Aptos blockchain.

-
-
-
-
-
-
-

REST API

-

Reference for the REST API to interact with the Aptos blockchain.

-
-
-
-
-
- -## Connect to an Aptos network - -Aptos offers the ability to run a local testnet, as well as provides a shared devnet and testnet. See the [System Integrators Guide](guides/system-integrators-guide.md#networks) for a summary of the available networks and the means to connect to them. - -:::tip Aptos Devnet Resets -The Aptos devnet is reset every Thursday. See the latest updates in the [Aptos Discord](https://discord.gg/aptosnetwork). -:::info - -::: - -## Find the ecosystem - -We are excited that you are here, and we look forward to getting to know you. Welcome to the Aptos community! Find out more about us and exchange ideas at: - -* [Discord](https://discord.gg/aptosnetwork) -* [Stack Overflow](https://stackoverflow.com/questions/tagged/aptos) -* [Forum](https://forum.aptoslabs.com/) -* [Medium](https://medium.com/aptoslabs) -* [Telegram](https://t.me/aptos_official) -* [Twitter](https://twitter.com/Aptos_Network) - -## Community projects on Aptos - -Here's a [list of community-maintained projects](https://github.com/aptos-foundation/ecosystem-projects) collected by the [Aptos Foundation](https://aptosfoundation.org/). If you have a project that you want added to the list, just edit the page and add a GitHub pull request. - -Want to pitch in on smaller tasks, such as doc updates and code fixes? See our [Community](./community/index.md) list for opportunities to help the Aptos ecosystem. diff --git a/developer-docs-site/docs/indexer/api/example-queries.md b/developer-docs-site/docs/indexer/api/example-queries.md deleted file mode 100644 index e81339ed8102a..0000000000000 --- a/developer-docs-site/docs/indexer/api/example-queries.md +++ /dev/null @@ -1,202 +0,0 @@ ---- -title: "Example Queries" ---- - -# Example Indexer API Queries - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -## Running example queries - -1. Open the Hasura Explorer for the network you want to query. 
You can find the URLs [here](/indexer/api/labs-hosted#hasura-explorer). -1. Paste the **Query** code from an example into the main query section, and the **Query Variables** code from the same example into the Query Variables section (below the main query section). - -## More Examples -You can find many more example queries in the [TypeScript SDK](https://github.com/aptos-labs/aptos-ts-sdk/tree/main/src/internal/queries). Indeed if you're using the TypeScript SDK, you should look at the [API](https://github.com/aptos-labs/aptos-ts-sdk/tree/main/src/api). - -## Example Token Queries - -Getting all tokens currently in account. - -**Query** - -```graphql -query CurrentTokens($owner_address: String, $offset: Int) { - current_token_ownerships( - where: {owner_address: {_eq: $owner_address}, amount: {_gt: "0"}, table_type: {_eq: "0x3::token::TokenStore"}} - order_by: [{last_transaction_version: desc}, {token_data_id: desc}] - offset: $offset - ) { - token_data_id_hash - name - collection_name - property_version - amount - } -} -``` - -**Query Variables** -```json -{ - "owner_address": "0xaa921481e07b82a26dbd5d3bc472b9ad82d3e5bfd248bacac160eac51687c2ff", - "offset": 0 -} -``` - ---- - -Getting all token activities for a particular token. **Note** that to get the `token_id_hash` you have to first make a query to get the token from the above query. - -**Query** - -```graphql -query TokenActivities($token_id_hash: String, $offset: Int) { - token_activities( - where: {token_data_id_hash: {_eq: $token_id_hash}} - # Needed for pagination - order_by: [{last_transaction_version: desc}, {event_index: asc}] - # Optional for pagination - offset: $offset - ) { - transaction_version - from_address - property_version - to_address - token_amount - transfer_type - } -} -``` - -**Query Variables** - -```json -{ - "token_id_hash": "f344b838264bf9aa57d5d4c1e0c8e6bbdc93f000abe3e7f050c2a0f4dc23d030", - "offset": 0 -} -``` - ---- - -Getting current token offered to account. 
- -**Query** - -```graphql -query CurrentOffers($to_address: String, $offset: Int) { - current_token_pending_claims( - where: {to_address: {_eq: $to_address}, amount: {_gt: "0"}} - # Needed for pagination - order_by: [{last_transaction_version: desc}, {token_data_id: desc}] - # Optional for pagination - offset: $offset - ) { - token_data_id_hash - name - collection_name - property_version - from_address - amount - } -} -``` - -** Query Variables** - -```json -{ - "to_address": "0xe7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb", - "offset": 0 -} -``` - -## Example Coin Queries - -Getting coin activities (including gas fees). - -**Query** - -```graphql -query CoinActivity($owner_address: String, $offset: Int) { - coin_activities( - where: {owner_address: {_eq: $owner_address}} - # Needed for pagination - order_by: [{last_transaction_version: desc}, {event_index: asc}] - # Optional for pagination - offset: $offset - ) { - activity_type - amount - coin_type - entry_function_id_str - transaction_version - } -} -``` - -**Query Variables** - -```json -{ - "owner_address": "0xe7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb", - "offset": 0 -} -``` - ---- - -Currently owned coins (`0x1::coin::CoinStore`). - -**Query** - -```graphql -query CurrentBalances($owner_address: String, $offset: Int)Ï { - current_coin_balances( - where: {owner_address: {_eq: $owner_address}} - # Needed for pagination - order_by: [{last_transaction_version: desc}, {token_data_id: desc}] - # Optional for pagination - offset: $offset - ) { - owner_address - coin_type - amount - last_transaction_timestamp - } -} -``` - -**Query Variables** - -```json -{ - "owner_address": "0xe7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb", - "offset": 0 -} -``` - -## Example Explorer Queries - -Getting all user transaction versions (to filter on user transaction for block explorer). 
- -**Query** - -```graphql -query UserTransactions($limit: Int) { - user_transactions(limit: $limit, order_by: {version: desc}) { - version - } -} -``` - -**Query Variables** - -```json -{ - "limit": 10 -} -``` diff --git a/developer-docs-site/docs/indexer/api/index.md b/developer-docs-site/docs/indexer/api/index.md deleted file mode 100644 index 82e20f7388ca2..0000000000000 --- a/developer-docs-site/docs/indexer/api/index.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -title: "Indexer API" ---- - -import BetaNotice from '../../../src/components/\_indexer_beta_notice.mdx'; - - - -This section contains documentation for the Aptos Indexer API, the API built upon the standard set of processors provided in the [aptos-labs/aptos-indexer-processors](https://github.com/aptos-labs/aptos-indexer-processors) repo. - -## Usage Guide - -### Address Format - -When making a query where one of the query params is an account address (e.g. owner), make sure the address starts with a prefix of `0x` followed by 64 hex characters. For example: `0xaa921481e07b82a26dbd5d3bc472b9ad82d3e5bfd248bacac160eac51687c2ff`. - -### TypeScript Client - -The Aptos TypeScript SDK provides an API functions for making queries to the Aptos Indexer API. Learn more [here](../../sdks/new-ts-sdk/fetch-data-from-chain.md). 
diff --git a/developer-docs-site/docs/indexer/api/labs-hosted.md b/developer-docs-site/docs/indexer/api/labs-hosted.md deleted file mode 100644 index 06d518a8be5ce..0000000000000 --- a/developer-docs-site/docs/indexer/api/labs-hosted.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: "Labs-Hosted Indexer API" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -## GraphQL API Endpoints - -When making GraphQL queries to the Labs-Hosted Indexer API, use the following endpoints: - -- **Mainnet:** https://indexer.mainnet.aptoslabs.com/v1/graphql -- **Testnet:** https://indexer-testnet.staging.gcp.aptosdev.com/v1/graphql -- **Devnet:** https://indexer-devnet.staging.gcp.aptosdev.com/v1/graphql - -## Hasura Explorer - -The following URLs are for the Hasura Explorer for the Labs-Hosted Indexer API: - -- **Mainnet:** https://cloud.hasura.io/public/graphiql?endpoint=https://indexer.mainnet.aptoslabs.com/v1/graphql -- **Testnet:** https://cloud.hasura.io/public/graphiql?endpoint=https://indexer-testnet.staging.gcp.aptosdev.com/v1/graphql -- **Devnet:** https://cloud.hasura.io/public/graphiql?endpoint=https://indexer-devnet.staging.gcp.aptosdev.com/v1/graphql - -## Rate limits - -The following rate limit applies for the Aptos Labs hosted indexer API: - -- For a web application that calls this Aptos-provided indexer API directly from the client (for example, wallet or explorer), the rate limit is currently 5000 requests per five minutes by IP address. **Note that this limit can change with or without prior notice.** - -If you need a higher rate limit, consider running the Aptos Indexer API yourself. See the guide to self hosting [here](/indexer/api/self-hosted). 
diff --git a/developer-docs-site/docs/indexer/api/self-hosted.md b/developer-docs-site/docs/indexer/api/self-hosted.md deleted file mode 100644 index bc903af8fbe21..0000000000000 --- a/developer-docs-site/docs/indexer/api/self-hosted.md +++ /dev/null @@ -1,95 +0,0 @@ ---- -title: "Self-Hosted Indexer API" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -This guide will walk you through setting up a self-hosted Indexer API. - -:::caution -Currently this guide only explains how to run processor part of the Indexer API. By the end of this guide you will have a running processor that consumes transactions from the Transaction Stream Service, parses them, and stores them in the database. Unfortunately this guide does not explain how to attach an API to this system right now. -::: - -## Prerequisites - -- A running PostgreSQL instance is required, with a valid user and database. In this example we call the user `postgres` and the database `indexer`. -- If you wish to use Docker, you must have Docker installed. [Installation Guide](https://docs.docker.com/get-docker/). - - -## Configuration -To run the service we need to define a config file. We will start with this template: - -```yaml -health_check_port: 8084 -server_config: - processor_config: - type: default_processor - postgres_connection_string: postgresql://postgres:@localhost:5432/indexer - indexer_grpc_data_service_address: 127.0.0.1:50051 - indexer_grpc_http2_ping_interval_in_secs: 60 - indexer_grpc_http2_ping_timeout_in_secs: 10 - auth_token: AUTH_TOKEN -``` - -From here you will likely want to change the values of some of these fields. Let's go through some of them. - -### `processor_name` -:::info -A single instance of the service only runs a single processor. If you want to run multiple processors, you must run multiple instances of the service. In this case, it is up to you whether to use the same database or not. -::: - -This is the processor you want to run. 
You can see what processors are available [here](https://github.com/aptos-labs/aptos-indexer-processors/blob/main/rust/processor/src/processors/mod.rs#L23). Some examples: -- `coin_processor` -- `ans_processor` -- `token_v2_processor` - -### `postgres_connection_string` -This is the connection string to your PostgreSQL database. It should be in the format `postgresql://:@:/`. - -### `indexer_grpc_data_service_address` -This is the URL for the Transaction Stream Service. If you are using the Labs-Hosted instance you can find the URLs for each network at [this page](../txn-stream/labs-hosted). Make sure to select the correct URL for the network you want to index. If you are running this service locally the value should be `127.0.0.1:50051`. - -### `auth_token` -This is the auth token used to connect to the Transaction Stream Service. If you are using the Labs-Hosted instance you can use the API Gateway to get an auth token. Learn more at [this page](/indexer/txn-stream/labs-hosted). - -## Run with source code -Clone the repo: -``` -# SSH -git clone git@github.com:aptos-labs/aptos-indexer-processors.git - -# HTTPS -git clone https://github.com/aptos-labs/aptos-indexer-processors.git -``` - -Navigate to the directory for the service: -``` -cd aptos-indexer-processors -cd rust/processor -``` - -Run the service: -``` -cargo run --release -- -c config.yaml -``` - -## Run with Docker - - -To run the service with Docker, use the following command: -``` -docker run -it --network host --mount type=bind,source=/tmp/config.yaml,target=/config.yaml aptoslabs/indexer-processor-rust -c /config.yaml -``` - -This command binds the container to the host network and mounts the config file from the host into the container. This specific invocation assumes that your config file in the host is at `/tmp/config.yaml`. - -See the image on DockerHub here: https://hub.docker.com/r/aptoslabs/indexer-processor-rust/tags. 
diff --git a/developer-docs-site/docs/indexer/api/usage-guide.md b/developer-docs-site/docs/indexer/api/usage-guide.md deleted file mode 100644 index 80e976c9bbfda..0000000000000 --- a/developer-docs-site/docs/indexer/api/usage-guide.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "Indexer API Usage Guide" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -Coming soon! - - diff --git a/developer-docs-site/docs/indexer/custom-processors/e2e-tutorial.md b/developer-docs-site/docs/indexer/custom-processors/e2e-tutorial.md deleted file mode 100644 index c91013b8d5277..0000000000000 --- a/developer-docs-site/docs/indexer/custom-processors/e2e-tutorial.md +++ /dev/null @@ -1,384 +0,0 @@ ---- -title: "End-to-End Tutorial" ---- - -# Creating a Custom Indexer Processor - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -In this tutorial, we're going to walk you through all the steps involved with creating a very basic custom indexer processor to track events and data on the Aptos blockchain. - -We use a very simple smart contract called **Coin Flip** that has already emitted events for us. - -The smart contract is already deployed, and you mostly don't need to understand it unless you're curious to mess with it or change things. - -## Getting Started - -To get started, clone the [aptos-indexer-processors](https://github.com/aptos-labs/aptos-indexer-processors) repo: -``` -# HTTPS -https://github.com/aptos-labs/aptos-indexer-processors.git - -# SSH -git@github.com:aptos-labs/aptos-indexer-processors.git -``` - -Navigate to the coin flip directory: -``` -cd aptos-indexer-processors -cd python/processors/coin_flip -``` - -Processors consume a stream of transactions from the Transaction Stream Service. In order to use the Labs-Hosted Transaction Stream Service you need an auth token. Follow [this guide](/indexer/txn-stream/labs-hosted#auth-tokens) to guide to get one. 
Once you're done, you should have a token that looks like this: -``` -aptoslabs_yj4bocpaKy_Q6RBP4cdBmjA8T51hto1GcVX5ZS9S65dx -``` - -You also need the following tools: -- The [Aptos CLI](/tools/aptos-cli/install-cli) -- Python 3.9+: [Installation Guide](https://docs.python-guide.org/starting/installation/#python-3-installation-guides). -- Poetry: [Installation Guide](https://python-poetry.org/docs/#installation). - -We use postgresql as our database in this tutorial. You're free to use whatever you want, but this tutorial is geared towards postgresql for the sake of simplicity. We use the following database configuration and tools: -- [Postgresql](https://www.postgresql.org/download/) - - We will use a database hosted on `localhost` on the port `5432`, which should be the default. - - When you create your username, keep track of it and the password you use for it. - - You can view a tutorial for installing postgresql and psql [here](https://www.digitalocean.com/community/tutorials/how-to-install-postgresql-on-ubuntu-22-04-quickstart) tool to set up your database more quickly. - - If you want to easily view your database data, consider using a GUI like [DBeaver](https://dbeaver.io/), [pgAdmin](https://www.pgadmin.org/), or [Postico](https://eggerapps.at/postico2/). - -Explaining how to create a database is beyond the scope of this tutorial. If you are not sure how to do it, consider checking out tutorials on how to create a database with the `psql` tool. - -## Setup your environment - -### Setup the postgresql database - -Make sure to start the `postgresql` service: - -The command for Linux/WSL might be something like: - -```shell -sudo service postgresql start -``` - -For mac, if you're using brew, start it up with: - -```shell -brew services start postgresql -``` - -Create your database with the name `coin_flip`, where our username is `user` and our password is `password`. 
- -If your database is set up correctly, and you have the `psql` tool, you should be able to run the command `psql -d coin_flip`. - -### Setup your local environment with poetry and grpc - -If you haven't yet, make sure to read the introductory [custom processor guide](https://github.com/aptos-labs/aptos-indexer-processors). - -You can also check out the python-specific broad overview of how to create an indexer processor [here](https://github.com/aptos-labs/aptos-indexer-processors/tree/main/python). - -## Configure your indexer processor - -Now let's setup the configuration details for the actual indexer processor we're going to use. - -### Setup your config.yaml file - -Copy the contents below and save it to a file called `config.yaml`. Save it in the `coin_flip` folder. Your file directory structure should look something like this: - -``` -- indexer - - python - - aptos_ambassador_token - - aptos-tontine - - coin_flip - - move - - sources - - coin_flip.move - - package_manager.move - - Move.toml - - config.yaml <-------- Edit this config.yaml file - - models.py - - processor.py - - README.md - - example_event_processor - - nft_marketplace_v2 - - nft_orderbooks - __init__.py - main.py - README.md - - rust - - scripts - - typescript -``` - -Once you have your config.yaml file open, you only need to change one field, if you just want to run the processor as is: -```yaml -grpc_data_stream_api_key: "" -``` - -### More customization with config.yaml - -However, if you'd like to customize things further, you can change some of the other fields. - -If you'd like to start at a specific version, you can specify that in the config.yaml file with: -```yaml -starting_version_default: 123456789 -``` - -This is the transaction version the indexer starts looking for events at. 
If the indexer has already processed transactions past this version, **it will skip all of them and go to the latest version stored.** - -The rows in `next_versions_to_process` are the `indexer_name` as the primary key and the `next_version` to process field, along with the `updated_at`. - -If you want to **force** the indexer to backfill data (overwrite/rewrite data) from previous versions even though it's already indexed past it, you can specify this in the config.yaml file with: - -```yaml -starting_version_backfill: 123456789 -``` - -If you want to use a different network, change the `grpc_data_stream_endpoint` field to the corresponding desired value: - -```yaml -devnet: 35.225.218.95:50051 -testnet: 35.223.137.149:50051 # north america -testnet: 34.64.252.224:50051 # asia -mainnet: 34.30.218.153:50051 -``` - -If these ip addresses don't work for you, they might be outdated. Check out the `README.md` at the root folder of the repository for the latest endpoints. - -If you're using a different database name or processor name, change the `processor_name` field and the `db_connection_uri` to your specific needs. Here's the general structure of the field: - -```yaml -db_connection_uri: "postgresql://username:password@database_url:port_number/database_name" -``` - -### Add your processor & schema names to the configuration files - -First, let's create the name for the database schema we're going to use. We use `coin_flip` in our example, so we need to add it in two places: - -1. We need to add it to our `python/utils/processor_name.py` file: -```python - class ProcessorName(Enum): - EXAMPLE_EVENT_PROCESSOR = "python_example_event_processor" - NFT_MARKETPLACE_V1_PROCESSOR = "nft_marketplace_v1_processor" - NFT_MARKETPLACE_V2_PROCESSOR = "nft_marketplace_v2_processor" - COIN_FLIP = "coin_flip" -``` -2. 
Add it to the constructor in the `IndexerProcessorServer` match cases in `utils/worker.py`: - -```python -match self.config.processor_name: - case ProcessorName.EXAMPLE_EVENT_PROCESSOR.value: - self.processor = ExampleEventProcessor() - case ProcessorName.NFT_MARKETPLACE_V1_PROCESSOR.value: - self.processor = NFTMarketplaceProcesser() - case ProcessorName.NFT_MARKETPLACE_V2_PROCESSOR.value: - self.processor = NFTMarketplaceV2Processor() - case ProcessorName.COIN_FLIP.value: - self.processor = CoinFlipProcessor() -``` - -3. Add it to the `python/utils/models/schema_names.py` file: - -```python -EXAMPLE = "example" -NFT_MARKETPLACE_SCHEMA_NAME = "nft_marketplace" -NFT_MARKETPLACE_V2_SCHEMA_NAME = "nft_marketplace_v2" -COIN_FLIP_SCHEMA_NAME = "coin_flip" -``` - -### Explanation of the event emission in the Move contract - -In our Move contract (in `coin_flip/move/sources/coin_flip.move`), each user has an object associated with their account. The object has a `CoinFlipStats` resource on it that tracks the total number of wins and losses a user has and is in charge of emitting events. - -```rust -// CoinFlipStats object/resource definition -#[resource_group_member(group = aptos_framework::object::ObjectGroup)] -struct CoinFlipStats has key { - wins: u64, - losses: u64, - event_handle: EventHandle, // - delete_ref: DeleteRef, -} - -// event emission in `flip_coin` -fun flip_coin( - user: &signer, - prediction: bool, - nonce: u64, -) acquires CoinFlipStats { - // ... 
- let (heads, correct_prediction) = flip(prediction, nonce); - - if (correct_prediction) { - coin_flip_stats.wins = coin_flip_stats.wins + 1; - } else { - coin_flip_stats.losses = coin_flip_stats.losses + 1; - }; - - event::emit_event( - &mut coin_flip_stats.event_handle, - CoinFlipEvent { - prediction: prediction, - result: heads, - wins: coin_flip_stats.wins, - losses: coin_flip_stats.losses, - } - ); -} -``` -The events emitted are of type `CoinFlipEvent`, shown below: -```rust -struct CoinFlipEvent has copy, drop, store { - prediction: bool, // true = heads, false = tails - result: bool, - wins: u64, - losses: u64, -} -``` - -### Viewing and understanding how the event data is emitted and processed - -When we submit a transaction that calls the `coin_flip` entry function, the indexer parses the events and records the data of each event that occurred in the transaction. - -Within the `data` field of each `Event` type, we see the arbitrary event data emitted. We use this data to store the event data in our database. - -The processor loops over each event in each transaction to process all event data. There are a *lot* of various types of events that can occur in a transaction- so we need to write a filtering function to deal with various events we don't want to store in our database. 
- -This is the simple iterative structure for our event List: - -```python -for event_index, event in enumerate(user_transaction.events): - # Skip events that don't match our filter criteria - if not CoinFlipProcessor.included_event_type(event.type_str): - continue -``` - -where the `included_event_type` function is a static method in our `CoinFlipProcessor` class: - -```python -@staticmethod -def included_event_type(event_type: str) -> bool: - parsed_tag = event_type.split("::") - module_address = parsed_tag[0] - module_name = parsed_tag[1] - event_type = parsed_tag[2] - # Now we can filter out events that are not of type CoinFlipEvent - # We can filter by the module address, module name, and event type - # If someone deploys a different version of our contract with the same event type, we may want to index it one day. - # So we could only check the event type instead of the full string - # For our sake, check the full string - return ( - module_address - == "0xe57752173bc7c57e9b61c84895a75e53cd7c0ef0855acd81d31cb39b0e87e1d0" - and module_name == "coin_flip" - and event_type == "CoinFlipEvent" - ) -``` - -If you wanted to see the event data for yourself inside the processor loop, you could add something like this to your `processor.py` file: - -```python -for event_index, event in enumerate(user_transaction.events): - # Skip events that don't match our filter criteria - if not CoinFlipProcessor.included_event_type(event.type_str): - continue - - # ... 
- - # Load the data into a json object and then use/view it as a regular dictionary - data = json.loads(event.data) - print(json.dumps(data, indent=3)) -``` -In our case, a single event prints this out: - - -```json -{ - 'losses': '49', - 'prediction': False, - 'result': True, - 'wins': '51' -} -``` - -So we'll get our data like this: - -```python -prediction = bool(data["prediction"]) -result = bool(data["result"]) -wins = int(data["wins"]) -losses = int(data["losses"]) - -# We have extra data to insert into the database, because we want to process our data. -# Calculate the total -win_percentage = wins / (wins + losses) -``` - -And then we add it to our event list with this: - -```python -# Create an instance of CoinFlipEvent -event_db_obj = CoinFlipEvent( - sequence_number=sequence_number, - creation_number=creation_number, - account_address=account_address, - transaction_version=transaction_version, - transaction_timestamp=transaction_timestamp, - prediction=prediction, - result=result, - wins=wins, - losses=losses, - win_percentage=win_percentage, - inserted_at=datetime.now(), - event_index=event_index, # when multiple events of the same type are emitted in a single transaction, this is the index of the event in the transaction -) -event_db_objs.append(event_db_obj) -``` -### Creating your database model - -Now that we know how we store our CoinFlipEvents in our database, let's go backwards a bit and clarify how we *create* this model for the database to use. 
- -We need to structure the `CoinFlipEvent` class in `models.py` to reflect the structure in our Move contract: - -```python -class CoinFlipEvent(Base): - __tablename__ = "coin_flip_events" - __table_args__ = ({"schema": COIN_FLIP_SCHEMA_NAME},) - - sequence_number: BigIntegerPrimaryKeyType - creation_number: BigIntegerPrimaryKeyType - account_address: StringPrimaryKeyType - prediction: BooleanType # from (event.data["prediction"] - result: BooleanType # from (event.data["result"] - wins: BigIntegerType # from (event.data["wins"] - losses: BigIntegerType # from (event.data["losses"] - win_percentage: NumericType # calculated from the above - transaction_version: BigIntegerType - transaction_timestamp: TimestampType - inserted_at: InsertedAtType - event_index: BigIntegerType -``` - -The unmarked fields are from the default event data for every event emitted on Aptos. The marked fields are specifically from the fields we calculated above. - -The other fields, __tablename__ and __table_args__, are indications to the python SQLAlchemy library as to what database and schema name we are using. - -## Running the indexer processor - -Now that we have our configuration files and our database and the python database model set up, we can run our processor. - -Navigate to the `python` directory of your indexer repository: - -```shell -cd ~/indexer/python -``` - -And then run the following command: - -```shell -poetry run python -m processors.main -c processors/coin_flip/config.yaml -``` - -If you're processing events correctly, the events should now show up in your database. 
diff --git a/developer-docs-site/docs/indexer/custom-processors/index.md b/developer-docs-site/docs/indexer/custom-processors/index.md deleted file mode 100644 index 9988a882eda77..0000000000000 --- a/developer-docs-site/docs/indexer/custom-processors/index.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -title: "Custom Processors" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -This section explains what a custom processor is, how to write and deploy one, and how to parse transactions from the [Transaction Stream Service](/indexer/txn-stream). - diff --git a/developer-docs-site/docs/indexer/custom-processors/parsing-txns.md b/developer-docs-site/docs/indexer/custom-processors/parsing-txns.md deleted file mode 100644 index a34cd5b3c1ed9..0000000000000 --- a/developer-docs-site/docs/indexer/custom-processors/parsing-txns.md +++ /dev/null @@ -1,81 +0,0 @@ ---- -title: "Parsing Transactions" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - - - -Fundamentally an indexer processor is just something that consumes a stream of a transactions and writes processed data to storage. Let's dive into what a transaction is and what kind of information you can extract from one. - -## What is a transaction? - -A transaction is a unit of execution on the Aptos blockchain. If the execution of the program in a transaction (e.g. starting with an entry function in a Move module) is successful, the resulting change in state will be applied to the ledger. Learn more about the transaction lifecycle at [this page](/concepts/blockchain/#life-of-a-transaction). - -There are four types of transactions on Aptos: -- Genesis -- Block metadata transactions -- State checkpoint transactions -- User transactions - -The first 3 of these are internal to the system and are not relevant to most processors; we do not cover them in this guide. 
- -Generally speaking, most user transactions originate from a user calling an entry function in a Move module deployed on chain, for example `0x1::coin::transfer`. In all other cases they originate from [Move scripts](/move/move-on-aptos/move-scripts). You can learn more about the different types of transactions [here](../../concepts/txns-states##types-of-transactions). - -A user transaction that a processor handles contains a variety of information. At a high level it contains: -- The payload that was submitted. -- The changes to the ledger resulting from the execution of the function / script. - -We'll dive into this in the following sections. - -## What is important in a transaction? - -### Payload - -The payload is what the user submits to the blockchain when they wish to execute a Move function. Some of the key information in the payload is: -- The sender address -- The address + module name + function name of the function being executed. -- The arguments to the function. - -There is other potentially interesting information in the payload that you can learn about at [this page](/concepts/txns-states#contents-of-a-transaction). - -### Events - -Events are emitted during the execution of a transaction. Each Move module can define its own events and choose when to emit the events during execution of a function. - -For example, in Move you might have the following: -```rust -struct MemberInvitedEvent has store, drop { - member: address, -} - -public entry fun invite_member(member: address) { - event::emit_event( - &mut member_invited_events, - MemberInvitedEvent { member }, - ); -} -``` - -If `invite_member` is called, you will find the `MemberInvitedEvent` in the transaction. - -:::tip Why emit events? -This is a good question! In some cases, you might find it unnecessary to emit events since you can just parse the writesets. 
However, sometimes it is quite difficult to get all the data you need from the different "locations" in the transaction, or in some cases it might not even be possible, e.g. if you want to index data that isn't included in the writeset. In these cases, events are a convenient way to bundle together everything you want to index. -::: - -### Writesets - -When a transaction executes, it doesn't directly affect on-chain state right then. Instead, it outputs a set of changes to be made to the ledger, called a writeset. The writeset is applied to the ledger later on after all validators have agreed on the result of the execution. - -Writesets show the end state of the on-chain data after the transaction has occurred. They are the source of truth of what data is stored on-chain. There are several types of write set changes: - -- Write module / delete module -- Write resource / delete resource -- Write table item / delete table item - - diff --git a/developer-docs-site/docs/indexer/indexer-landing.md b/developer-docs-site/docs/indexer/indexer-landing.md deleted file mode 100644 index 1abd424bf82ed..0000000000000 --- a/developer-docs-site/docs/indexer/indexer-landing.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -title: "Learn about Indexing" ---- - -import BetaNotice from '../../src/components/_indexer_beta_notice.mdx'; -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - - - -## Quick Start - -Refer to this role-oriented guide to help you quickly find the relevant docs: - -- Core Infra Provider: You want to run your own Transaction Stream Service in addition to the rest of the stack. - - See docs for [Self-Hosted Transaction Stream Service](/indexer/txn-stream/self-hosted). -- API Operator: You want to run the Indexer API on top of a hosted Transaction Stream Service. - - See docs for [Self-Hosted Indexer API](/indexer/api/self-hosted). 
-- Custom Processor Builder: You want to build a custom processor on top of a hosted Transaction Stream Service. - - See docs for [Custom Processors](/indexer/custom-processors). -- Indexer API Consumer: You want to use a hosted Indexer API. - - See docs for the [Labs-Hosted Indexer API](/indexer/api/labs-hosted). - - See the [Indexer API Usage Guide](/indexer/api/usage-guide). - -# Architecture Overview - -Typical applications built on the Aptos blockchain, on any blockchain for that matter, require the raw blockchain data to be shaped and stored in an application-specific manner. This is essential to supporting low-latency and rich experiences when consuming blockchain data in end-user apps from millions of users. The [Aptos Node API](https://aptos.dev/nodes/aptos-api-spec#/) provides a lower level, stable and generic API and is not designed to support data shaping and therefore cannot support rich end-user experiences directly. - -The Aptos Indexer is the answer to this need, allowing the data shaping critical to real-time app use. See this high-level diagram for how Aptos indexing works: - -
- -
- -At a high level, indexing on the Aptos blockchain works like this: - -1. Users of a dApp, for example, on an NFT marketplace, interact with the Aptos blockchain via a rich UI presented by the dApp. Behind the scenes, these interactions generate, via smart contracts, the transaction and event data. This raw data is stored in the distributed ledger database, for example, on an Aptos fullnode. -1. This raw ledger data is read and indexed using an application-specific data model, in this case an NFT marketplace-specific data model (”Business logic” in the above diagram). This NFT marketplace-specific index is then stored in a separate database (”Indexed database” in the above diagram) and exposed via an API. -1. The dApp sends NFT-specific GraphQL queries to this indexed database and receives rich data back, which is then served to the users. - -Step 2 is facilitated by the Aptos Indexer. The diagram above is a simplified view of how the system works at a high level. In reality, the system is composed of many components. If you are interested in these details, see the [Detailed Overview](#detailed-overview) below. - -## Indexer API - -Aptos supports the following ways to access indexed data. - -1. [Labs hosted Indexer API](/indexer/api/labs-hosted): This API is rate-limited and is intended only for lightweight applications such as wallets. This option is not recommended for high-bandwidth applications. -2. [Self hosted Indexer API](/indexer/api/self-hosted): Run your own deployment of the Labs hosted indexer stack. -3. [Custom processor](/indexer/custom-processors): Write and deploy a custom processor to index and expose data in a way specific to your needs. - -## Transaction Stream Service - -The Indexer API and Custom Processors depend on the Transaction Stream Service. In short, this service provides a GRPC stream of transactions that processors consume. Learn more about this service [here](/indexer/txn-stream/). 
Aptos Labs offers a [hosted instance of this service](/indexer/txn-stream/labs-hosted) but you may also [run your own](/indexer/txn-stream/self-hosted). - -## Detailed Overview - -This diagram explains how the Aptos Indexer tech stack works in greater detail. - -
-
- -
-
- - - -## Legacy Indexer -Find information about the legacy indexer [here](/indexer/legacy/). diff --git a/developer-docs-site/docs/indexer/legacy/custom-data-model.md b/developer-docs-site/docs/indexer/legacy/custom-data-model.md deleted file mode 100644 index dc50288aeab4e..0000000000000 --- a/developer-docs-site/docs/indexer/legacy/custom-data-model.md +++ /dev/null @@ -1,196 +0,0 @@ ---- -title: "Custom Data Model" ---- - -:::warning Legacy Indexer -This is documentation for the legacy indexer. To learn how to write a custom processor with the latest indexer stack, see [Custom Processors](/indexer/custom-processors). -::: - -## Define your own data model - -Use this method if you want to develop your custom indexer for the Aptos ledger data. - -:::tip When to use custom indexer -Currently Aptos-provided indexing service (see above) supports the following core Move modules: -- `0x1::coin`. -- `0x3::token`. -- `0x3::token_transfers`. - -If you need an indexed database for any other Move modules and contracts, then you should develop your custom indexer. -::: - -Creating a custom indexer involves the following steps. Refer to the indexing block diagram at the start of this document. - -1. Define new table schemas, using an ORM like [Diesel](https://diesel.rs/). In this document Diesel is used to describe the custom indexing steps ("Business logic" and the data queries in the diagram). -2. Create new data models based on the new tables ("Business logic" in the diagram). -3. Create a new transaction processor, or optionally add to an existing processor. In the diagram this step corresponds to processing the ledger database according to the new business logic and writing to the indexed database. -4. Integrate the new processor. Optional if you are reusing an existing processor. - -In the below detailed description, an example of indexing and querying for the coin balances is used. 
You can see this in the [`coin_processor`](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/processors/coin_processor.rs). - -### 1. Define new table schemas - -In this example we use [PostgreSQL](https://www.postgresql.org/) and [Diesel](https://diesel.rs/) as the ORM. To make sure that we make backward-compatible changes without having to reset the database at every upgrade, we use [Diesel migrations](https://docs.rs/diesel_migrations/latest/diesel_migrations/) to manage the schema. This is why it is very important to start with generating a new Diesel migration before doing anything else. - -Make sure you clone the Aptos-core repo by running `git clone https://github.com/aptos-labs/aptos-core.git` and then `cd` into `aptos-core/tree/main/crates/indexer` directory. Then proceed as below. - -a. The first step is to create a new Diesel migration. This will generate a new folder under [migrations](https://github.com/aptos-labs/aptos-core/tree/main/crates/indexer/migrations) with `up.sql` and `down.sql` - -```bash -DATABASE_URL=postgres://postgres@localhost:5432/postgres diesel migration generate add_coin_tables -``` - -b. Create the necessary table schemas. This is just PostgreSQL code. In the code shown below, the `up.sql` will have the new changes and `down.sql` will revert those changes. 
- -```sql --- up.sql --- coin balances for each version -CREATE TABLE coin_balances ( - transaction_version BIGINT NOT NULL, - owner_address VARCHAR(66) NOT NULL, - -- Hash of the non-truncated coin type - coin_type_hash VARCHAR(64) NOT NULL, - -- creator_address::name::symbol - coin_type VARCHAR(5000) NOT NULL, - amount NUMERIC NOT NULL, - transaction_timestamp TIMESTAMP NOT NULL, - inserted_at TIMESTAMP NOT NULL DEFAULT NOW(), - -- Constraints - PRIMARY KEY ( - transaction_version, - owner_address, - coin_type_hash - ) -); --- latest coin balances -CREATE TABLE current_coin_balances {...} --- down.sql -DROP TABLE IF EXISTS coin_balances; -DROP TABLE IF EXISTS current_coin_balances; -``` - -See the [full source for `up.sql` and `down.sql`](https://github.com/aptos-labs/aptos-core/tree/main/crates/indexer/migrations/2022-10-04-073529_add_coin_tables). - -c. Run the migration. We suggest running it multiple times with `redo` to ensure that both `up.sql` and `down.sql` are implemented correctly. This will also modify the [`schema.rs`](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/schema.rs) file. - -```bash -DATABASE_URL=postgres://postgres@localhost:5432/postgres diesel migration run -DATABASE_URL=postgres://postgres@localhost:5432/postgres diesel migration redo -``` - -### 2. Create new data schemas - -We now have to prepare the Rust data models that correspond to the Diesel schemas. 
In the case of coin balances, we will define `CoinBalance` and `CurrentCoinBalance` as below: - -```rust -#[derive(Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] -#[diesel(primary_key(transaction_version, owner_address, coin_type))] -#[diesel(table_name = coin_balances)] -pub struct CoinBalance { - pub transaction_version: i64, - pub owner_address: String, - pub coin_type_hash: String, - pub coin_type: String, - pub amount: BigDecimal, - pub transaction_timestamp: chrono::NaiveDateTime, -} - -#[derive(Debug, Deserialize, FieldCount, Identifiable, Insertable, Serialize)] -#[diesel(primary_key(owner_address, coin_type))] -#[diesel(table_name = current_coin_balances)] -pub struct CurrentCoinBalance { - pub owner_address: String, - pub coin_type_hash: String, - pub coin_type: String, - pub amount: BigDecimal, - pub last_transaction_version: i64, - pub last_transaction_timestamp: chrono::NaiveDateTime, -} -``` - -We will also need to specify the parsing logic, where the input is a portion of the transaction. In the case of coin balances, we can find all the details in `WriteSetChanges`, specifically where the write set change type is `write_resources`. - -**Where to find the relevant data for parsing**: This requires a combination of understanding the Move module and the structure of the transaction. In the example of coin balance, the contract lives in [coin.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move), specifically the coin struct (search for `struct Coin`) that has a `value` field. We then look at an [example transaction](https://fullnode.testnet.aptoslabs.com/v1/transactions/by_version/259518) where we find this exact structure in `write_resources`: - -```json -"changes": [ - { - ... - "data": { - "type": "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", - "data": { - "coin": { - "value": "49742" - }, - ... 
-``` - -See the full code in [coin_balances.rs](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/models/coin_models/coin_balances.rs). - -### 3. Create a new processor - -Now that we have the data model and the parsing function, we need to call that parsing function and save the resulting model in our Postgres database. We do this by creating (or modifying) a `processor`. We have abstracted a lot already from that class, so the only function that should be implemented is `process_transactions` (there are a few more functions that should be copied, those should be obvious from the example). - -The `process_transactions` function takes in a vector of transactions with a start and end version that are used for tracking purposes. The general flow should be: - - Loop through transactions in the vector. - - Aggregate relevant models. Sometimes deduping is required, e.g. in the case of `CurrentCoinBalance`. - - Insert the models into the database in a single Diesel transaction. This is important, to ensure that we do not have partial writes. - - Return status (error or success). - -:::tip Coin transaction processor -See [coin_process.rs](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/processors/coin_processor.rs) for a relatively straightforward example. You can search for `coin_balances` in the page for the specific code snippet related to coin balances. -::: - -**How to decide whether to create a new processor:** This is completely up to you. The benefit of creating a new processor is that you are starting from scratch so you will have full control over exactly what gets written to the indexed database. The downside is that you will have to maintain a new fullnode, since there is a 1-to-1 mapping between a fullnode and the processor. - -### 4. Integrate the new processor - -This is the easiest step and involves just a few additions. - -1. 
To start with, make sure to add the new processor in the Rust code files: [`mod.rs`](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/processors/mod.rs) and [`runtime.rs`](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/runtime.rs). See below: - -[**mod.rs**](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/processors/mod.rs) - -```rust -pub enum Processor { - CoinProcessor, - ... -} -... - COIN_PROCESSOR_NAME => Self::CoinProcessor, -``` - -[**runtime.rs**](https://github.com/aptos-labs/aptos-core/blob/main/crates/indexer/src/runtime.rs) - -```rust -Processor::CoinProcessor => Arc::new(CoinTransactionProcessor::new(conn_pool.clone())), -``` - -2. Create a `fullnode.yaml` with the correct configuration and test the custom indexer by starting a fullnode with this `fullnode.yaml`. - -**fullnode.yaml** - -```yaml -storage: - enable_indexer: true - storage_pruner_config: - ledger_pruner_config: - enable: false - -indexer: - enabled: true - check_chain_id: true - emit_every: 1000 - postgres_uri: "postgres://postgres@localhost:5432/postgres" - processor: "coin_processor" - fetch_tasks: 10 - processor_tasks: 10 -``` - -Test by starting an Aptos fullnode by running the below command. You will see many logs in the terminal output, so use the `grep` filter to see only indexer log output, as shown below: - -```bash -cargo run -p aptos-node --features "indexer" --release -- -f ./fullnode_coin.yaml | grep -E "_processor" -``` - -See the full instructions on how to start an indexer-enabled fullnode in [Indexer Fullnode](./indexer-fullnode). 
diff --git a/developer-docs-site/docs/indexer/legacy/index.md b/developer-docs-site/docs/indexer/legacy/index.md deleted file mode 100644 index 9ff2c96e6ba08..0000000000000 --- a/developer-docs-site/docs/indexer/legacy/index.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -title: "Legacy Indexer" ---- - -# Legacy Indexer - -:::caution Deprecation Alert - -From Now - end of Q2, 2024: We will not be adding any new features to the legacy Indexer. However, we will continue to generally support the community, and will make sure that any changes made on the blockchain level does not break the existing legacy processors. - -After Q2, 2024: We will remove the indexer crates from the [aptos-core](https://github.com/aptos-labs/aptos-core) repo and the legacy indexer will no longer be supported. Please look at our new [Transaction Stream Service](/indexer/txn-stream/) and updated [Indexer API](/indexer/api/) - -::: diff --git a/developer-docs-site/docs/indexer/legacy/indexer-fullnode.md b/developer-docs-site/docs/indexer/legacy/indexer-fullnode.md deleted file mode 100644 index a815b2304a5d3..0000000000000 --- a/developer-docs-site/docs/indexer/legacy/indexer-fullnode.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: "Run an Indexer Fullnode" -slug: "indexer-fullnode" ---- - -:::warning Legacy Indexer -This is documentation for the legacy indexer. To learn how to run the underlying infrastructure for the latest indexer stack, see [Transaction Stream Service](/indexer/txn-stream). -::: - -# Run an Aptos Indexer - -:::danger On macOS with Apple silicon only -The below installation steps are verified only on macOS with Apple silicon. They might require minor tweaking when running on other builds. -::: - -## Summary - -To run an indexer fullnode, these are the steps in summary: - -1. Make sure that you have all the required tools and packages described below in this document. -1. 
Follow the instructions to [set up a public fullnode](/nodes/full-node/fullnode-source-code-or-docker.md) but do not start the fullnode yet. -1. Edit the `fullnode.yaml` as described below in this document. -1. Run the indexer fullnode per the instructions below. - -## Prerequisites - -Install the packages below. Note, you may have already installed many of these while [preparing your development environment](/guides/building-from-source). You can confirm by running `which command-name` and ensuring the package appears in the output (although `libpq` will not be returned even when installed). - -> Important: If you are on macOS, you will need to [install Docker following the official guidance](https://docs.docker.com/desktop/install/mac-install/) rather than `brew`. - -For an Aptos indexer fullnode, install these packages: - - - [`brew`](https://brew.sh/) - `/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"` Run the commands emitted in the output to add the command to your path and install any dependencies - - [`cargo` Rust package manager](https://www.rust-lang.org/tools/install) - `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` - - [`docker`](https://docs.docker.com/get-docker/) - `brew install docker` - - [libpq Postgres C API library containing the `pg_ctl` command](https://formulae.brew.sh/formula/libpq) - `brew install libpq` - Make sure to perform all export commands after the installation. - - [`postgres` PostgreSQL server](https://www.postgresql.org/) - `brew install postgresql` - - [`diesel`](https://diesel.rs/) - `brew install diesel` - -## Set up the database - -1. Start the PostgreSQL server: - `brew services start postgresql` -1. Ensure you can run `psql postgres` and then exit the prompt by entering: `\q` -1. Create a PostgreSQL user `postgres` with the `createuser` command (find it with `which`): - ```bash - /path/to/createuser -s postgres - ``` -1. 
Clone `aptos-core` repository if you have not already: - ```bash - git clone https://github.com/aptos-labs/aptos-core.git - ``` -1. Navigate (or `cd`) into `aptos-core/crates/indexer` directory. -1. Create the database schema: - ```bash - diesel migration run --database-url postgresql://localhost/postgres - ``` - This will create a database schema with the subdirectory `migrations` located in this `aptos-core/crates/indexer` directory. If for some reason this database is already in use, try a different database. For example: `DATABASE_URL=postgres://postgres@localhost:5432/indexer_v2 diesel database reset` - -## Start the fullnode indexer - -1. Follow the instructions to set up a [public fullnode](/nodes/full-node/fullnode-source-code-or-docker.md) and prepare the setup, but **do not** yet start the indexer (with `cargo run` or `docker run`). -1. Pull the latest indexer Docker image with: - ```bash - docker pull aptoslabs/validator:nightly_indexer - ``` -1. Edit the `./fullnode.yaml` and add the following configuration: - ```yaml - storage: - enable_indexer: true - # This is to avoid the node being pruned - storage_pruner_config: - ledger_pruner_config: - enable: false - - indexer: - enabled: true - postgres_uri: "postgres://postgres@localhost:5432/postgres" - processor: "default_processor" - check_chain_id: true - emit_every: 500 - ``` - -:::tip Bootstap the fullnode -Instead of syncing your indexer fullnode from genesis, which may take a long period of time, you can choose to bootstrap your fullnode using backup data before starting it. To do so, follow the instructions to [restore from a backup](/nodes/full-node/aptos-db-restore.md). - -Note: indexers cannot be bootstrapped using [a snapshot](/nodes/full-node/bootstrap-fullnode.md) or [fast sync](../../guides/state-sync.md#fast-syncing). -::: - -1. Run the indexer fullnode with either `cargo run` or `docker run` depending upon your setup. 
Remember to supply the arguments you need for your specific node: - ```bash - docker run -p 8080:8080 \ - -p 9101:9101 -p 6180:6180 \ - -v $(pwd):/opt/aptos/etc -v $(pwd)/data:/opt/aptos/data \ - --workdir /opt/aptos/etc \ - --name=aptos-fullnode aptoslabs/validator:nightly_indexer aptos-node \ - -f /opt/aptos/etc/fullnode.yaml - ``` - or: - ```bash - cargo run -p aptos-node --features "indexer" --release -- -f ./fullnode.yaml - ``` - -## Restart the indexer - -To restart the PostgreSQL server: - -1. [shut down the server](https://www.postgresql.org/docs/8.1/postmaster-shutdown.html) by searching for the `postmaster` process and killing it: - ```bash - ps -ef | grep -i postmaster - ``` - -1. Copy the process ID (PID) for the process and pass it to the following command to shut it down: - ```bash - kill -INT PID - ``` - -1. Restart the PostgreSQL server with: - ```bash - brew services restart postgresql@14 - ``` diff --git a/developer-docs-site/docs/indexer/legacy/migration.md b/developer-docs-site/docs/indexer/legacy/migration.md deleted file mode 100644 index c75690a55423c..0000000000000 --- a/developer-docs-site/docs/indexer/legacy/migration.md +++ /dev/null @@ -1,124 +0,0 @@ ---- -title: "Migrate to Transaction Stream Service" ---- - -This guide contains information on how to migrate to using the Transaction Stream Service if you are currently running a legacy indexer. - -The old indexer stack requires running an archival fullnode with additional threads to process the transactions which is difficult and expensive to maintain. Adding more custom logic either requires a bulkier machine, or running several fullnodes that scale linearly. - -This new way of indexing uses the [Transaction Stream Service](https://aptos.dev/indexer/txn-stream/). You can either use the [Labs-Hosted Transaction Stream Service](https://aptos.dev/indexer/txn-stream/labs-hosted/) or [run your own instance of Transaction Stream Service](https://aptos.dev/indexer/txn-stream/self-hosted). 
- -## 1. Clone the repo - -``` -# SSH -git clone git@github.com:aptos-labs/aptos-indexer-processors.git - -# HTTPS -git clone https://github.com/aptos-labs/aptos-indexer-processors.git -``` - -Navigate to the directory for the service: - -``` -cd aptos-indexer-processors -cd rust/processor -``` - -## 2. Migrate processors to Transaction Stream Service - -For each processor you're migrating, you'll need to create a config file using the template below. You can find more information about each field of the config file [here](https://aptos.dev/indexer/api/self-hosted/#configuration). - -```yaml -health_check_port: 8084 -server_config: - processor_config: - type: default_processor - postgres_connection_string: - indexer_grpc_data_service_address: - indexer_grpc_http2_ping_interval_in_secs: 60 - indexer_grpc_http2_ping_timeout_in_secs: 10 - auth_token: - starting_version: 0 # optional - ending_version: 0 # optional -``` - -To connect the processor to the Transaction Stream Service, you need to set the URL for `indexer_grpc_data_service_address`. Choose one of the following options. - -### Option A: Connect to Labs-Hosted Transaction Stream Service - -The main benefit of using the Labs-Hosted Transaction Stream Service is that you no longer need to run an archival fullnode to get a stream of transactions. This service is rate-limited. Instructions to connect to Labs-Hosted Transaction Stream can be found [here](https://aptos.dev/indexer/txn-stream/labs-hosted). - -### Option B: Run a Self-Hosted Transaction Stream Service - -If you choose to, you can run a self-hosted instance of the Transaction Stream Service and connect your processors to it. Instructions to run a Self-Hosted Transaction Stream can be found [here](https://aptos.dev/indexer/txn-stream/self-hosted). - -## 3. (Optional) Migrate custom processors to Transaction Stream Service - -If you have custom processors written with the old indexer, we highly recommend starting from scratch with a new database. 
Using a new database ensures that all your custom database migrations will be applied during this migration. - -### a. Migrate custom table schemas - -Migrate your custom schemas by copying over each of your custom migrations to the [`migrations`](https://github.com/aptos-labs/aptos-indexer-processors/tree/main/rust/processor/migrations) folder. - -### b. Migrate custom processors code - -Migrate the code by copying over your custom processors to the [`processors`](https://github.com/aptos-labs/aptos-indexer-processors/tree/main/rust/processor) folder and any relevant custom models to the [`models`](https://github.com/aptos-labs/aptos-indexer-processors/tree/main/rust/processor/src/models) folder. Integrate the custom processors with the rest of the code by adding them to the following Rust code files. - -[`mod.rs`](https://github.com/aptos-labs/aptos-indexer-processors/blob/main/rust/processor/src/processors/mod.rs) - -``` -pub enum Processor { - ... - CoinProcessor, - ... -} - -impl Processor { - ... - COIN_PROCESSOR_NAME => Self::CoinProcessor, - ... -} -``` - -[`worker.rs`](https://github.com/aptos-labs/aptos-indexer-processors/blob/main/rust/processor/src/worker.rs) - -``` -Processor::CoinProcessor => { - Arc::new(CoinTransactionProcessor::new(self.db_pool.clone())) -}, -``` - -## 4. Backfill Postgres database with Diesel - -Even though the new processors have the same Postgres schemas as the old ones, we recommend you do a complete backfill (ideally writing to a new DB altogether) because some fields are a bit different as a result of the protobuf conversion. - -These instructions asusme you are familar with using [Diesel migrations](https://docs.rs/diesel_migrations/latest/diesel_migrations/). Run the full database migration with the following command: - -``` -DATABASE_URL=postgres://postgres@localhost:5432/postgres diesel migration run -``` - -## 5. 
Run the migrated processors - -To run a single processor, use the following command: - -``` -cargo run --release -- -c config.yaml -``` - -If you have multiple processors, you'll need to run a separate instance of the service for each of the processors. - -If you'd like to run the processor as a Docker image, the instructions are listed [here](https://aptos.dev/indexer/api/self-hosted#run-with-docker). - -## FAQs - -### 1. Will the protobuf ever be updated, and what do I need to do at that time? - -The protobuf schema may be updated in the future. Backwards incompatible changes will be communicated in release notes. - -### 2. What if I already have custom logic written in the old indexer? Is it easy to migrate those? - -Since the new indexer stack has the same Postgres schema as the old indexer stack, it should be easy to migrate your processors. We still highly recommend creating a new DB for this migration so that any custom DB migrations are applie. - -Follow Step 3 in this guide to migrate your custom logic over to the new processors stack. diff --git a/developer-docs-site/docs/indexer/txn-stream/index.md b/developer-docs-site/docs/indexer/txn-stream/index.md deleted file mode 100644 index ba0472f0b465f..0000000000000 --- a/developer-docs-site/docs/indexer/txn-stream/index.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -title: "Transaction Stream Service" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -The Transaction Stream Service is a service that listens to the Aptos blockchain and emits transactions as they are processed. These docs explain how this system works, how to use the Labs-Hosted instance of the service, and how to deploy it yourself. 
diff --git a/developer-docs-site/docs/indexer/txn-stream/labs-hosted.md b/developer-docs-site/docs/indexer/txn-stream/labs-hosted.md deleted file mode 100644 index 642e8611e19b3..0000000000000 --- a/developer-docs-site/docs/indexer/txn-stream/labs-hosted.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: "Labs-Hosted Transaction Stream Service" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -If you are running your own instance of the [Indexer API](/indexer/api), or a [custom processor](/indexer/custom-processors), you must have access to an instance of the Transaction Stream Service. This page contains information about how to use the Labs-Hosted Transaction Stream Service. - -## Endpoints -All endpoints are in GCP us-central1 unless otherwise specified. - -- **Mainnet:** grpc.mainnet.aptoslabs.com:443 -- **Testnet:** grpc.testnet.aptoslabs.com:443 -- **Devnet:** grpc.devnet.aptoslabs.com:443 - - - -## Auth tokens - -In order to use the Labs-Hosted Transaction Stream Service you must have an auth token. To get an auth token, do the following: -1. Go to https://aptos-api-gateway-prod.firebaseapp.com. -1. Sign in and select "API Tokens" in the left sidebar. -1. Create a new token. You will see the token value in the first table. - -You can provide the auth key by setting the `Authorization` HTTP header ([MDN](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization)). 
For example, with curl: -``` -curl -H 'Authorization: Bearer aptoslabs_yj4donpaKy_Q6RBP4cdBmjA8T51hto1GcVX5ZS9S65dx' -``` - -For more comprehensive information about how to use the Transaction Stream Service, see the docs for the downstream systems: -- [Indexer API](/indexer/api/self-hosted) -- [Custom Processors](/indexer/custom-processors) diff --git a/developer-docs-site/docs/indexer/txn-stream/local-development.md b/developer-docs-site/docs/indexer/txn-stream/local-development.md deleted file mode 100644 index f79a693f471f0..0000000000000 --- a/developer-docs-site/docs/indexer/txn-stream/local-development.md +++ /dev/null @@ -1,123 +0,0 @@ ---- -title: "Running Locally" ---- - -# Running the Transaction Stream Service Locally - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -:::info -This has been tested on MacOS 13 on ARM and Debian 11 on x86_64. -::: - -When building a custom processor, you might find it helpful to develop against a local development stack. The Transaction Stream Service is a complicated, multi-component system. To assist with local development, we offer a Python script that wraps a Docker compose file to set up the entire system. - -This script sets up the following: -- Single node testnet with the indexer GRPC stream enabled. -- A Redis instance. -- Transaction Stream Service, including the following components: - - [cache-worker](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/indexer-grpc/indexer-grpc-cache-worker): Pulls transactions from the node and stores them in Redis. - - [file-store](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/indexer-grpc/indexer-grpc-file-store): Fetches transactions from Redis and stores them in a filesystem. - - [data-service](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/indexer-grpc/indexer-grpc-data-service): Serves transactions via a GRPC stream to downstream clients. 
It pulls from either the cache or the file store depending on the age of the transaction. -- Shared volumes and networking to hook it all up. - -You can learn more about the Transaction Stream Service architecture [here](/indexer/txn-stream) and the Docker compose file [here](https://github.com/aptos-labs/aptos-core/blob/main/docker/compose/indexer-grpc/docker-compose.yaml). - -## Prerequisites -In order to use the local development script you must have the following installed: -- Python 3.8+: [Installation Guide](https://docs.python-guide.org/starting/installation/#python-3-installation-guides). -- Poetry: [Installation Guide](https://python-poetry.org/docs/#installation). -- Docker: [Installation Guide](https://docs.docker.com/get-docker/). -- Docker Compose v2: This should be installed by default with modern Docker installations, verify with this command: -```bash -docker-compose version --short -``` -- grpcurl: [Installation Guide](https://github.com/fullstorydev/grpcurl#installation) -- OpenSSL - -## Preparation -Clone the aptos-core repo: -``` -# HTTPS -git clone https://github.com/aptos-labs/aptos-core.git - -# SSH -git clone git@github.com:aptos-labs/aptos-core.git -``` - -Navigate to the `testsuite` directory: -``` -cd aptos-core -cd testsuite -``` - -Install the Python dependencies: -``` -poetry install -``` - -## Running the script -### Starting the service -``` -poetry run python indexer_grpc_local.py start -``` - -You will know this succeeded if the command exits and you see the following: -``` -Attempting to stream from indexer grpc for 10s -Stream finished successfully -``` - -### Stopping the service -``` -poetry run python indexer_grpc_local.py stop -``` - -### Wiping the data -When you start, stop, and start the service again, it will re-use the same local testnet data. 
If you wish to wipe the local testnet and start from scratch you can run the following command: -``` -poetry run python indexer_grpc_local.py wipe -``` - -## Using the local service -You can connect to the local Transaction Stream Service, e.g. from a custom processor, using the following configuration values: -``` -indexer_grpc_data_service_address: 127.0.0.1:50052 -auth_token: dummy_token -``` - -You can connect to the node at the following address: -``` -http://127.0.0.1:8080/v1 -``` - -## Debugging - -### Usage on ARM systems -If you have a machine with an ARM processor, e.g. an M1/M2 Mac, the script should detect that and set the appropriate environment variables to ensure that the correct images will be used. If you have issues with this, try setting the following environment variable: -```bash -export DOCKER_DEFAULT_PLATFORM=linux/amd64 -``` - -Additionally, make sure the following settings are correct in Docker Desktop: -- Enabled: Preferences > General > Use Virtualization framework -- Enabled: Preferences > General > Use Docker Compose V2 -- Disabled: Features in development -> Use Rosetta for x86/amd64 emulation on Apple Silicon - -This script has not been tested on Linux ARM systems. - -### Redis fails to start -Try setting the following environment variable before running the script: -```bash -export REDIS_IMAGE_REPO=arm64v8/redis -``` - -### Cache worker is crashlooping or `Redis latest version update failed.` in log -Wipe the data: -```bash -poetry run python indexer_grpc_local.py wipe -``` - -This means historical data will be lost. 
diff --git a/developer-docs-site/docs/indexer/txn-stream/self-hosted.md b/developer-docs-site/docs/indexer/txn-stream/self-hosted.md deleted file mode 100644 index 4cd2a5675cd77..0000000000000 --- a/developer-docs-site/docs/indexer/txn-stream/self-hosted.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -title: "Self-Hosted Transaction Stream Service" ---- - -import BetaNotice from '../../../src/components/_indexer_beta_notice.mdx'; - - - -Coming soon! diff --git a/developer-docs-site/docs/integration/aptos-name-service-connector.md b/developer-docs-site/docs/integration/aptos-name-service-connector.md deleted file mode 100644 index ef27af9a6b61e..0000000000000 --- a/developer-docs-site/docs/integration/aptos-name-service-connector.md +++ /dev/null @@ -1,86 +0,0 @@ ---- -title: "Integrate with Aptos Names Service" -id: "aptos-names-service-package" ---- -# Integrate with Aptos Names Service -The Aptos Name Service provides a React UI package that offers developers a customizable button and modal to enable users to search for and mint Aptos names directly from their website. - -## Prerequisites -- [React project](https://create-react-app.dev/docs/getting-started/) -- Supporting dependencies installed in the root directory of your React project with npm or yarn: - - `npm install @emotion/styled @emotion/react` - - `yarn add @emotion/styled @emotion/react react-copy-to-clipboard` - -## Use Aptos Names Service Connector -1. Open a terminal session and navigate to the root directory of your React project. -1. Install the `aptos-names-connector` package using npm or yarn: - - `npm install "@aptos-labs/aptos-names-connector"` - - `yarn add "@aptos-labs/aptos-names-connector"` -1. 
Once you have installed the package, you can import the `AptosNamesConnector` component and use it in your React application (by default in `./src/App.js`): - ``` - import { AptosNamesConnector } from "@aptos-labs/aptos-names-connector"; - - function MyComponent() { - const handleSignTransaction = async () => { - // Handle signing of transaction - }; - - return ( - - ); - } - ``` - 1. To see your changes, start a development server using npm or yarn. The following commands will open the React application in your default web browser (typically to `localhost:3000`): - - `npm start` - - `yarn start` - -## Configure `AptosNamesConnector` properties -The `AptosNamesConnector` component accepts the following props: - -- `onSignTransaction`: A required callback function that is called when the user clicks the "Mint" button in the modal. This function should handle the signing of the transaction. -- `isWalletConnected`: A boolean value that indicates whether the user's wallet is connected. -- `network`: A string value that specifies whether the component should connect to the mainnet or testnet. -- `buttonLabel`: A string value that specifies the text to display on the button. - -## Customize button label and appearance -The button label can be customized by passing a string value to the buttonLabel prop. -The appearance of the button in the `AptosNamesConnector` component can be customized to fit in your website. The button has the CSS class name of `ans_connector_button`: - -``` -.ans-connector-button { - background-color: #000000; - border: none; - border-radius: 4px; - color: #ffffff; - cursor: pointer; - font-size: 16px; - font-weight: bold; - padding: 12px 16px; -} -``` -To use `ans_connector_button` in your React application, add `import "@aptos-labs/aptos-names-connector/dist/index.css";` to the top of your App.js file and reference it with `` - -## Supported networks -The `AptosNamesConnector` component supports both mainnet and testnet. 
To connect to the mainnet, set the network prop to "mainnet". To connect to the testnet, set the network prop to "testnet". - -## Example -The following example shows how to use the `AptosNamesConnector` component in a React application: - - - -- Add a ‘claim name’ button to any page in your application. This allows your users to directly create an Aptos name, giving them a human-readable .apt name for their Aptos wallet address. You can customize the look of the button to suit your application. Here is an example on the profile page of an NFT marketplace. - -![Claim name](../../static/img/docs/ans_entrypoint_example.png) - -- When the button is clicked, the Aptos Names modal will show up, and the user can search for a name and mint it directly in your application. - -![Show Aptos Name Service modal](../../static/img/docs/ans_entrypoint_modal_example.png) - -- Once the user has minted their name, you can replace their Aptos wallet address by querying from Aptos fullnodes. Now your users have a human-readable .apt name. - -![Claim another name](../../static/img/docs/ans_entrypoint_with_other_name.png) diff --git a/developer-docs-site/docs/integration/index.md b/developer-docs-site/docs/integration/index.md deleted file mode 100644 index 546576cce01cf..0000000000000 --- a/developer-docs-site/docs/integration/index.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: "Interact with Blockchain" ---- - -# Integrate with the Aptos Blockchain - -Follow these guides to work with the Aptos blockchain. 
- -- ### [Aptos Token Overview](../guides/nfts/aptos-token-overview.md) -- ### [Integrate with Aptos Names Service](aptos-name-service-connector.md) -- ### [Integrate with the Aptos Faucet](../guides/system-integrators-guide.md#integrating-with-the-faucet) -- ### [Error Codes in Aptos](../reference/error-codes.md) diff --git a/developer-docs-site/docs/integration/wallet-adapter-concept.md b/developer-docs-site/docs/integration/wallet-adapter-concept.md deleted file mode 100644 index fd0d20abb6f81..0000000000000 --- a/developer-docs-site/docs/integration/wallet-adapter-concept.md +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: "Integrate with Aptos Wallets" -id: "wallet-adapter-concept" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Integrate with Aptos wallets - -Decentralized applications often run through a browser extension or mobile application to read onchain data and submit -transactions. The Aptos Wallet Adapter allows for a single interface for apps and wallets to integrate together. - -## Implementing the Aptos Wallet Adapter - -For the best user experience, we suggest that dapps offer multiple wallets, to allow users to choose their preferred -wallet. - -Implementing wallet integration can be difficult for dapps in: - -1. Support and test all edge cases -2. Implement and maintain different wallet APIs -3. Provide users with needed functionality the wallet itself doesn't support -4. Keep track on all the different wallets in our ecosystem - -In addition, creating and implementing a wallet is also not an easy task, - -1. Provide a wallet that follows a known standard so it is easy to integrate with -2. Getting visibility and exposure in the ecosystem among all the other wallets -3. 
Dapp projects need to dedicate time and resource to integrate the wallet within their app - -When we started building a wallet adapter, we wanted to provide an adapter that can be easy enough for wallets to integrate with and for dapps to use and implement. - -For that, we provide an [Aptos Wallet Adapter](https://github.com/aptos-labs/aptos-wallet-adapter) monorepo for wallet and dapps creators to ease development and ensure a smooth process in building projects on the Aptos network. -The Aptos Wallet Adapter acts as a service between dapps and wallets and exposes APIs for dapps to interact with the wallets by following our [Wallet Standard](../standards/wallets.md). This in turns allows dapps to support many wallets with minimal integration efforts, and for wallets to follow a known standard and gain visibility. - -## Adapter structure - -The adapter has three different components, the: - -1. Adapter Core package -2. Adapter React provider (for dapps) -3. Adapter Template plugin (for wallets) - -This structure offers the following benefits: - -- Modularity (separation of concerns) - separating the adapter into three components can help having more freedom in design, implementation, deployment and usage. -- Wallets create and own their plugin implementation (instead of having all in the same monorepo): - - Reduces the packages bundle size used by dapps. - - Lets them be self-service and support themselves without too much friction. - - Prevents build failures in case of any bugs/bad implementation/wrong config files/etc. -- Simplicity - keeps the Provider package very light and small as the major logic is implemented in the core package. -- Flexibility - for wallets in creating and implementing custom functions. - -### Adapter Core package - -The [Adapter Core package](https://github.com/aptos-labs/aptos-wallet-adapter/tree/main/packages/wallet-adapter-core) handles the interaction between the dapp and the wallet. 
It: - -- Exposes the standard API (and some different functions supported by different wallets) -- Holds the current wallet state and the installed wallets -- Emits events on different actions and much more - -Dapps should not _know_ this package as dapps interact with the provider, which in turn interacts with the core package; some Types are exposed from the core package for the dapp to use. - -Wallets should implement their own plugin class that extends the basic plugin class (properties + events) interface that lives in the core package. - -:::tip -If a wallet supports functions that are not part of the basic plugin interface, a pull request should be made to the core package to include this function so it can support it. You can take a look at the `signTransaction` on the wallet core package for guidance. -::: - -### Adapter React provider - -The light [Adapter React package](https://github.com/aptos-labs/aptos-wallet-adapter/tree/main/packages/wallet-adapter-react) is for dapps to import and use. The package contains a `Provider` and a `Context` to implement and use within your app. - -Follow the [Wallet Adapter For Dapp Builders](./wallet-adapter-for-dapp.md) guide on how to use the provider package on your dapp. - -### Adapter Template plugin - -Wallets looking to integrate with the adapter should implement their own wallet plugin, to ease the process we provide you with a pre-made class that implements the basic functionality needed (according to the wallet standard). - -The [Wallet Adapter Plugin Template repo](https://github.com/aptos-labs/wallet-adapter-plugin-template) holds a pre-made class, a test file, and some config files to help you build and publish the plugin as an NPM package. - -Follow the [Wallet Adapter For Wallet Builders](./wallet-adapter-for-wallets.md) on how to use the template to implement and publish your wallet plugin. - -
- -
diff --git a/developer-docs-site/docs/integration/wallet-adapter-for-dapp.md b/developer-docs-site/docs/integration/wallet-adapter-for-dapp.md deleted file mode 100644 index 9d5b206645ff6..0000000000000 --- a/developer-docs-site/docs/integration/wallet-adapter-for-dapp.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -title: "For Dapps" -id: "wallet-adapter-for-dapp" ---- - -# Wallet Adapter For Dapp Builders - -Imagine you have a great idea for a dapp and you want to start building it. Eventually, you will need to integrate a wallet or multiple wallets so your users can interact with the Aptos blockchain. -Implementing wallet integration can be difficult in supporting all edge cases, new features, unsupported functionality. And it can be even harder to support multiple wallets. - -In addition, different wallets have different APIs, and not all wallets share the same naming convention. For example, maybe all wallets have a `connect` method, but not all wallets call that method `connect`; that can be tricky to support. - -Luckily, Aptos built a wallet adapter, created and maintained by the Aptos team, to help you ramp up development and standardize where possible. - -The Aptos Wallet Adapter provides: - -- Easy wallet implementation - no need to implement and support code for multiple wallets. -- Support for different wallet APIs. -- Support for features not implemented on the wallet level. -- Detection for uninstalled wallets (so you can show users that a wallet is not installed). -- Auto-connect functionality and remembers the current wallet state. -- Listens to wallet events, such as account and network changes. -- A well-developed and maintained reference implementation by the Aptos ecosystem team. - -## Install - -Currently, the adapter supports a _React provider_ for you to include in your app. - -Install wallet dependencies you want to include in your app. 
You can find a list of the wallets in the Aptos Wallet Adapter [README](https://github.com/aptos-labs/aptos-wallet-adapter#supported-wallet-packages). - -Install the React provider: - -```bash -npm install @aptos-labs/wallet-adapter-react -``` - -## Import dependencies - -In the `App.jsx` file: - -Import the installed wallets: - -```js -import { PetraWallet } from "petra-plugin-wallet-adapter"; -``` - -Import the `AptosWalletAdapterProvider`: - -```js -import { AptosWalletAdapterProvider } from "@aptos-labs/wallet-adapter-react"; -``` - -Wrap your app with the Provider, pass it the plugins (wallets) you want to have on your app as an array, and include an autoConnect option (set to false by default): - -```js -const wallets = [new PetraWallet()]; - - -; -``` - -### Use - -On any page you want to use the wallet properties, import `useWallet` from `@aptos-labs/wallet-adapter-react`: - -```js -import { useWallet } from "@aptos-labs/wallet-adapter-react"; -``` - -You can then use the exported properties: - -```js -const { - connect, - account, - network, - connected, - disconnect, - wallet, - wallets, - signAndSubmitTransaction, - signTransaction, - signMessage, -} = useWallet(); -``` - -Finally, use the [examples](https://github.com/aptos-labs/aptos-wallet-adapter/tree/main/packages/wallet-adapter-react#examples) on the package README file to build more functionality into your dapps. 
diff --git a/developer-docs-site/docs/integration/wallet-adapter-for-wallets.md b/developer-docs-site/docs/integration/wallet-adapter-for-wallets.md deleted file mode 100644 index 86170317fb3d5..0000000000000 --- a/developer-docs-site/docs/integration/wallet-adapter-for-wallets.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: "For Wallets" -id: "wallet-adapter-for-wallets" ---- - -# Wallet Adapter For Wallet Builders - -To gain from dapps in the Aptos Ecosystem and provide your users the functionality they are looking for in a wallet, your wallet plugin should follow the [Aptos Wallet Standard](../standards/wallets.md) and be built from the Aptos Wallet Adapter. - -The [wallet-adapter-plugin-template](https://github.com/aptos-labs/wallet-adapter-plugin-template) repository gives wallet builders a pre-made class with all required wallet functionality following the Aptos Wallet Standard for easy and fast development. - -## Configuration - -1. `git clone git@github.com:aptos-labs/wallet-adapter-plugin-template.git` -1. Open `src/index.ts` for editing. -1. Replace all `AptosWindow` references with: `Window` -1. Replace `AptosWalletName` with: `WalletName` -1. Replace `url` with your website URL. -1. Change `icon` to your wallet icon (pay attention to the required format). -1. Replace `window.aptos` with: `window.` - - Make sure the `Window Interface` has `` as a key (instead of `aptos`). -1. Open `__tests/index.test.tsx` and change `AptosWallet` to: `Wallet` -1. Run tests with `pnpm test` - all tests should pass. - -At this point, you have a ready wallet class with all required properties and functions to integrate with the Aptos Wallet Adapter. - -### Publish as a package - -The next step is to publish your wallet as an NPM package so dapps can install it as a dependency. 
Use one of the options below: - -[Creating and publishing scoped public packages](https://docs.npmjs.com/creating-and-publishing-scoped-public-packages) - -[Creating and publishing unscoped public packages](https://docs.npmjs.com/creating-and-publishing-unscoped-public-packages) - -:::tip -If your wallet provides functionality that is not included, you should open a pull request against `aptos-wallet-adapter` in the core package to have it support this functionality. See the `signTransaction` on the [wallet core package](https://github.com/aptos-labs/aptos-wallet-adapter/blob/main/packages/wallet-adapter-core/src/WalletCore.ts) for guidance. -::: - -### Add your name to the wallets list - -Once the package is published, create a pull request against the [aptos-wallet-adapter](https://github.com/aptos-labs/aptos-wallet-adapter) package and add your wallet name to the [supported wallet list](https://github.com/aptos-labs/aptos-wallet-adapter#supported-wallet-packages) on the README file as a URL to your NPM package. 
diff --git a/developer-docs-site/docs/move/book/SUMMARY.md b/developer-docs-site/docs/move/book/SUMMARY.md deleted file mode 100644 index 3d5e9bda02670..0000000000000 --- a/developer-docs-site/docs/move/book/SUMMARY.md +++ /dev/null @@ -1,46 +0,0 @@ -# The Move Programming Language - -[Introduction](introduction.md) - -## Getting Started - -- [Modules and Scripts](modules-and-scripts.md) -- [Move Tutorial](creating-coins.md) - -## Primitive Types - -- [Integers](integers.md) -- [Bool](bool.md) -- [Address](address.md) -- [Vector](vector.md) -- [Signer](signer.md) -- [References](references.md) -- [Tuples and Unit](tuples.md) - -## Basic Concepts - -- [Local Variables and Scopes](variables.md) -- [Equality](equality.md) -- [Abort and Assert](abort-and-assert.md) -- [Conditionals](conditionals.md) -- [While and Loop](loops.md) -- [Functions](functions.md) -- [Structs and Resources](structs-and-resources.md) -- [Constants](constants.md) -- [Generics](generics.md) -- [Type Abilities](abilities.md) -- [Uses and Aliases](uses.md) -- [Friends](friends.md) -- [Packages](packages.md) -- [Package Upgrades](package-upgrades.md) -- [Unit Tests](unit-testing.md) - -## Global Storage - -- [Global Storage Structure](global-storage-structure.md) -- [Global Storage Operators](global-storage-operators.md) - -## Reference - -- [Standard Library](standard-library.md) -- [Coding Conventions](coding-conventions.md) diff --git a/developer-docs-site/docs/move/book/abilities.md b/developer-docs-site/docs/move/book/abilities.md deleted file mode 100644 index 81ecc0dd2b025..0000000000000 --- a/developer-docs-site/docs/move/book/abilities.md +++ /dev/null @@ -1,244 +0,0 @@ -# Abilities - -Abilities are a typing feature in Move that control what actions are permissible for values of a given type. This system grants fine grained control over the "linear" typing behavior of values, as well as if and how values are used in global storage. 
This is implemented by gating access to certain bytecode instructions so that for a value to be used with the bytecode instruction, it must have the ability required (if one is required at all—not every instruction is gated by an ability). - - - -## The Four Abilities - -The four abilities are: - -* [`copy`](#copy) - * Allows values of types with this ability to be copied. -* [`drop`](#drop) - * Allows values of types with this ability to be popped/dropped. -* [`store`](#store) - * Allows values of types with this ability to exist inside a struct in global storage. -* [`key`](#key) - * Allows the type to serve as a key for global storage operations. - -### `copy` - -The `copy` ability allows values of types with that ability to be copied. It gates the ability to copy values out of local variables with the [`copy`](./variables.md#move-and-copy) operator and to copy values via references with [dereference `*e`](./references.md#reading-and-writing-through-references). - -If a value has `copy`, all values contained inside of that value have `copy`. - -### `drop` - -The `drop` ability allows values of types with that ability to be dropped. By dropped, we mean that value is not transferred and is effectively destroyed as the Move program executes. As such, this ability gates the ability to ignore values in a multitude of locations, including: -* not using the value in a local variable or parameter -* not using the value in a [sequence via `;`](./variables.md#expression-blocks) -* overwriting values in variables in [assignments](./variables.md#assignments) -* overwriting values via references when [writing `*e1 = e2`](./references.md#reading-and-writing-through-references). - -If a value has `drop`, all values contained inside of that value have `drop`. - -### `store` - -The `store` ability allows values of types with this ability to exist inside of a struct (resource) in global storage, *but* not necessarily as a top-level resource in global storage. 
This is the only ability that does not directly gate an operation. Instead it gates the existence in global storage when used in tandem with `key`. - -If a value has `store`, all values contained inside of that value have `store` - -### `key` - -The `key` ability allows the type to serve as a key for [global storage operations](./global-storage-operators.md). It gates all global storage operations, so in order for a type to be used with `move_to`, `borrow_global`, `move_from`, etc., the type must have the `key` ability. Note that the operations still must be used in the module where the `key` type is defined (in a sense, the operations are private to the defining module). - -If a value has `key`, all values contained inside of that value have `store`. This is the only ability with this sort of asymmetry. - -## Builtin Types - -Most primitive, builtin types have `copy`, `drop`, and `store` with the exception of `signer`, which just has `drop` - -* `bool`, `u8`, `u16`, `u32`, `u64`, `u128`, `u256`, and `address` all have `copy`, `drop`, and `store`. -* `signer` has `drop` - * Cannot be copied and cannot be put into global storage -* `vector` may have `copy`, `drop`, and `store` depending on the abilities of `T`. - * See [Conditional Abilities and Generic Types](#conditional-abilities-and-generic-types) for more details. -* Immutable references `&` and mutable references `&mut` both have `copy` and `drop`. - * This refers to copying and dropping the reference itself, not what they refer to. - * References cannot appear in global storage, hence they do not have `store`. - -None of the primitive types have `key`, meaning none of them can be used directly with the [global storage operations](./global-storage-operators.md). - -## Annotating Structs - -To declare that a `struct` has an ability, it is declared with `has ` after the struct name but before the fields. 
For example: - -```move -struct Ignorable has drop { f: u64 } -struct Pair has copy, drop, store { x: u64, y: u64 } -``` - -In this case: `Ignorable` has the `drop` ability. `Pair` has `copy`, `drop`, and `store`. - - -All of these abilities have strong guarantees over these gated operations. The operation can be performed on the value only if it has that ability; even if the value is deeply nested inside of some other collection! - -As such: when declaring a struct’s abilities, certain requirements are placed on the fields. All fields must satisfy these constraints. These rules are necessary so that structs satisfy the reachability rules for the abilities given above. If a struct is declared with the ability... - -* `copy`, all fields must have `copy`. -* `drop`, all fields must have `drop`. -* `store`, all fields must have `store`. -* `key`, all fields must have `store`. - * `key` is the only ability currently that doesn’t require itself. - -For example: - -```move -// A struct without any abilities -struct NoAbilities {} - -struct WantsCopy has copy { - f: NoAbilities, // ERROR 'NoAbilities' does not have 'copy' -} -``` - -and similarly: - -```move -// A struct without any abilities -struct NoAbilities {} - -struct MyResource has key { - f: NoAbilities, // Error 'NoAbilities' does not have 'store' -} -``` - -## Conditional Abilities and Generic Types - -When abilities are annotated on a generic type, not all instances of that type are guaranteed to have that ability. Consider this struct declaration: - -``` -struct Cup has copy, drop, store, key { item: T } -``` - -It might be very helpful if `Cup` could hold any type, regardless of its abilities. The type system can *see* the type parameter, so it should be able to remove abilities from `Cup` if it *sees* a type parameter that would violate the guarantees for that ability. - -This behavior might sound a bit confusing at first, but it might be more understandable if we think about collection types. 
We could consider the builtin type `vector` to have the following type declaration: - -``` -vector has copy, drop, store; -``` - -We want `vector`s to work with any type. We don't want separate `vector` types for different abilities. So what are the rules we would want? Precisely the same that we would want with the field rules above. So, it would be safe to copy a `vector` value only if the inner elements can be copied. It would be safe to ignore a `vector` value only if the inner elements can be ignored/dropped. And, it would be safe to put a `vector` in global storage only if the inner elements can be in global storage. - -To have this extra expressiveness, a type might not have all the abilities it was declared with depending on the instantiation of that type; instead, the abilities a type will have depends on both its declaration **and** its type arguments. For any type, type parameters are pessimistically assumed to be used inside of the struct, so the abilities are only granted if the type parameters meet the requirements described above for fields. Taking `Cup` from above as an example: - -* `Cup` has the ability `copy` only if `T` has `copy`. -* It has `drop` only if `T` has `drop`. -* It has `store` only if `T` has `store`. -* It has `key` only if `T` has `store`. - -Here are examples for this conditional system for each ability: - -### Example: conditional `copy` - -``` -struct NoAbilities {} -struct S has copy, drop { f: bool } -struct Cup has copy, drop, store { item: T } - -fun example(c_x: Cup, c_s: Cup) { - // Valid, 'Cup' has 'copy' because 'u64' has 'copy' - let c_x2 = copy c_x; - // Valid, 'Cup' has 'copy' because 'S' has 'copy' - let c_s2 = copy c_s; -} - -fun invalid(c_account: Cup, c_n: Cup) { - // Invalid, 'Cup' does not have 'copy'. 
- // Even though 'Cup' was declared with copy, the instance does not have 'copy' - // because 'signer' does not have 'copy' - let c_account2 = copy c_account; - // Invalid, 'Cup' does not have 'copy' - // because 'NoAbilities' does not have 'copy' - let c_n2 = copy c_n; -} -``` - -### Example: conditional `drop` - -``` -struct NoAbilities {} -struct S has copy, drop { f: bool } -struct Cup has copy, drop, store { item: T } - -fun unused() { - Cup { item: true }; // Valid, 'Cup' has 'drop' - Cup { item: S { f: false }}; // Valid, 'Cup' has 'drop' -} - -fun left_in_local(c_account: Cup): u64 { - let c_b = Cup { item: true }; - let c_s = Cup { item: S { f: false }}; - // Valid return: 'c_account', 'c_b', and 'c_s' have values - // but 'Cup', 'Cup', and 'Cup' have 'drop' - 0 -} - -fun invalid_unused() { - // Invalid, Cannot ignore 'Cup' because it does not have 'drop'. - // Even though 'Cup' was declared with 'drop', the instance does not have 'drop' - // because 'NoAbilities' does not have 'drop' - Cup { item: NoAbilities {}}; -} - -fun invalid_left_in_local(): u64 { - let c_n = Cup { item: NoAbilities {}}; - // Invalid return: 'c_n' has a value - // and 'Cup' does not have 'drop' - 0 -} -``` - -### Example: conditional `store` - -``` -struct Cup has copy, drop, store { item: T } - -// 'MyInnerResource' is declared with 'store' so all fields need 'store' -struct MyInnerResource has store { - yes: Cup, // Valid, 'Cup' has 'store' - // no: Cup, Invalid, 'Cup' does not have 'store' -} - -// 'MyResource' is declared with 'key' so all fields need 'store' -struct MyResource has key { - yes: Cup, // Valid, 'Cup' has 'store' - inner: Cup, // Valid, 'Cup' has 'store' - // no: Cup, Invalid, 'Cup' does not have 'store' -} -``` - -### Example: conditional `key` - -``` -struct NoAbilities {} -struct MyResource has key { f: T } - -fun valid(account: &signer) acquires MyResource { - let addr = signer::address_of(account); - // Valid, 'MyResource' has 'key' - let has_resource = 
exists>(addr); - if (!has_resource) { - // Valid, 'MyResource' has 'key' - move_to(account, MyResource { f: 0 }) - }; - // Valid, 'MyResource' has 'key' - let r = borrow_global_mut>(addr) - r.f = r.f + 1; -} - -fun invalid(account: &signer) { - // Invalid, 'MyResource' does not have 'key' - let has_it = exists>(addr); - // Invalid, 'MyResource' does not have 'key' - let NoAbilities {} = move_from(addr); - // Invalid, 'MyResource' does not have 'key' - move_to(account, NoAbilities {}); - // Invalid, 'MyResource' does not have 'key' - borrow_global(addr); -} -``` diff --git a/developer-docs-site/docs/move/book/abort-and-assert.md b/developer-docs-site/docs/move/book/abort-and-assert.md deleted file mode 100644 index eb1fab905f537..0000000000000 --- a/developer-docs-site/docs/move/book/abort-and-assert.md +++ /dev/null @@ -1,207 +0,0 @@ -# Abort and Assert - -[`return`](./functions.md) and `abort` are two control flow constructs that end execution, one for -the current function and one for the entire transaction. - -More information on [`return` can be found in the linked section](./functions.md) - -## `abort` - -`abort` is an expression that takes one argument: an **abort code** of type `u64`. For example: - -```move -abort 42 -``` - -The `abort` expression halts execution of the current function and reverts all changes made to global -state by the current transaction. There is no mechanism for "catching" or otherwise handling an -`abort`. - -Luckily, in Move transactions are all or nothing, meaning any changes to global storage are made all -at once only if the transaction succeeds. Because of this transactional commitment of changes, after -an abort there is no need to worry about backing out changes. While this approach is lacking in -flexibility, it is incredibly simple and predictable. - -Similar to [`return`](./functions.md), `abort` is useful for exiting control flow when some -condition cannot be met. 
- -In this example, the function will pop two items off of the vector, but will abort early if the -vector does not have two items - -```move= -use std::vector; -fun pop_twice(v: &mut vector): (T, T) { - if (vector::length(v) < 2) abort 42; - - (vector::pop_back(v), vector::pop_back(v)) -} -``` - -This is even more useful deep inside a control-flow construct. For example, this function checks -that all numbers in the vector are less than the specified `bound`. And aborts otherwise - -```move= -use std::vector; -fun check_vec(v: &vector, bound: u64) { - let i = 0; - let n = vector::length(v); - while (i < n) { - let cur = *vector::borrow(v, i); - if (cur > bound) abort 42; - i = i + 1; - } -} -``` - -### `assert` - -`assert` is a builtin, macro-like operation provided by the Move compiler. It takes two arguments, a -condition of type `bool` and a code of type `u64` - -```move -assert!(condition: bool, code: u64) -``` - -Since the operation is a macro, it must be invoked with the `!`. This is to convey that the -arguments to `assert` are call-by-expression. In other words, `assert` is not a normal function and -does not exist at the bytecode level. It is replaced inside the compiler with - -```move -if (condition) () else abort code -``` - -`assert` is more commonly used than just `abort` by itself. The `abort` examples above can be -rewritten using `assert` - -```move= -use std::vector; -fun pop_twice(v: &mut vector): (T, T) { - assert!(vector::length(v) >= 2, 42); // Now uses 'assert' - - (vector::pop_back(v), vector::pop_back(v)) -} -``` - -and - -```move= -use std::vector; -fun check_vec(v: &vector, bound: u64) { - let i = 0; - let n = vector::length(v); - while (i < n) { - let cur = *vector::borrow(v, i); - assert!(cur <= bound, 42); // Now uses 'assert' - i = i + 1; - } -} -``` - -Note that because the operation is replaced with this `if-else`, the argument for the `code` is not -always evaluated. 
For example: - -```move -assert!(true, 1 / 0) -``` - -Will not result in an arithmetic error, it is equivalent to - -```move -if (true) () else (1 / 0) -``` - -So the arithmetic expression is never evaluated! - -### Abort codes in the Move VM - -When using `abort`, it is important to understand how the `u64` code will be used by the VM. - -Normally, after successful execution, the Move VM produces a change-set for the changes made to -global storage (added/removed resources, updates to existing resources, etc). - -If an `abort` is reached, the VM will instead indicate an error. Included in that error will be two -pieces of information: - -- The module that produced the abort (address and name) -- The abort code. - -For example - -```move= -address 0x2 { -module example { - public fun aborts() { - abort 42 - } -} -} - -script { - fun always_aborts() { - 0x2::example::aborts() - } -} -``` - -If a transaction, such as the script `always_aborts` above, calls `0x2::example::aborts`, the VM -would produce an error that indicated the module `0x2::example` and the code `42`. - -This can be useful for having multiple aborts being grouped together inside a module. 
- -In this example, the module has two separate error codes used in multiple functions - -```move= -address 0x42 { -module example { - - use std::vector; - - const EMPTY_VECTOR: u64 = 0; - const INDEX_OUT_OF_BOUNDS: u64 = 1; - - // move i to j, move j to k, move k to i - public fun rotate_three(v: &mut vector, i: u64, j: u64, k: u64) { - let n = vector::length(v); - assert!(n > 0, EMPTY_VECTOR); - assert!(i < n, INDEX_OUT_OF_BOUNDS); - assert!(j < n, INDEX_OUT_OF_BOUNDS); - assert!(k < n, INDEX_OUT_OF_BOUNDS); - - vector::swap(v, i, k); - vector::swap(v, j, k); - } - - public fun remove_twice(v: &mut vector, i: u64, j: u64): (T, T) { - let n = vector::length(v); - assert!(n > 0, EMPTY_VECTOR); - assert!(i < n, INDEX_OUT_OF_BOUNDS); - assert!(j < n, INDEX_OUT_OF_BOUNDS); - assert!(i > j, INDEX_OUT_OF_BOUNDS); - - (vector::remove(v, i), vector::remove(v, j)) - } -} -} -``` - -## The type of `abort` - -The `abort i` expression can have any type! This is because both constructs break from the normal -control flow, so they never need to evaluate to the value of that type. - -The following are not useful, but they will type check - -```move -let y: address = abort 0; -``` - -This behavior can be helpful in situations where you have a branching instruction that produces a -value on some branches, but not all. For example: - -```move -let b = - if (x == 0) false - else if (x == 1) true - else abort 42; -// ^^^^^^^^ `abort 42` has type `bool` -``` diff --git a/developer-docs-site/docs/move/book/address.md b/developer-docs-site/docs/move/book/address.md deleted file mode 100644 index 34a2883049bb2..0000000000000 --- a/developer-docs-site/docs/move/book/address.md +++ /dev/null @@ -1,73 +0,0 @@ -# Address - -`address` is a built-in type in Move that is used to represent locations (sometimes called accounts) in global storage. An `address` value is a 256-bit (32-byte) identifier. 
At a given address, two things can be stored: [Modules](./modules-and-scripts.md) and [Resources](./structs-and-resources.md). - -Although an `address` is a 256-bit integer under the hood, Move addresses are intentionally opaque---they cannot be created from integers, they do not support arithmetic operations, and they cannot be modified. Even though there might be interesting programs that would use such a feature (e.g., pointer arithmetic in C fills a similar niche), Move does not allow this dynamic behavior because it has been designed from the ground up to support static verification. - -You can use runtime address values (values of type `address`) to access resources at that address. You *cannot* access modules at runtime via address values. - -## Addresses and Their Syntax - -Addresses come in two flavors, named or numerical. The syntax for a named address follows the -same rules for any named identifier in Move. The syntax of a numerical address is not restricted -to hex-encoded values, and any valid [`u256` numerical value](./integers.md) can be used as an -address value, e.g., `42`, `0xCAFE`, and `2021` are all valid numerical address -literals. - -To distinguish when an address is being used in an expression context or not, the -syntax when using an address differs depending on the context where it's used: -* When an address is used as an expression the address must be prefixed by the `@` character, i.e., [`@`](./integers.md) or `@`. -* Outside of expression contexts, the address may be written without the leading `@` character, i.e., [``](./integers.md) or ``. - -In general, you can think of `@` as an operator that takes an address from being a namespace item to being an expression item. - -## Named Addresses - -Named addresses are a feature that allow identifiers to be used in place of -numerical values in any spot where addresses are used, and not just at the -value level. 
Named addresses are declared and bound as top level elements -(outside of modules and scripts) in Move Packages, or passed as arguments -to the Move compiler. - -Named addresses only exist at the source language level and will be fully -substituted for their value at the bytecode level. Because of this, modules -and module members _must_ be accessed through the module's named address -and not through the numerical value assigned to the named address during -compilation, e.g., `use my_addr::foo` is _not_ equivalent to `use 0x2::foo` -even if the Move program is compiled with `my_addr` set to `0x2`. This -distinction is discussed in more detail in the section on [Modules and -Scripts](./modules-and-scripts.md). - -### Examples - -```move -let a1: address = @0x1; // shorthand for 0x0000000000000000000000000000000000000000000000000000000000000001 -let a2: address = @0x42; // shorthand for 0x0000000000000000000000000000000000000000000000000000000000000042 -let a3: address = @0xDEADBEEF; // shorthand for 0x00000000000000000000000000000000000000000000000000000000DEADBEEF -let a4: address = @0x000000000000000000000000000000000000000000000000000000000000000A; -let a5: address = @std; // Assigns `a5` the value of the named address `std` -let a6: address = @66; -let a7: address = @0x42; - -module 66::some_module { // Not in expression context, so no @ needed - use 0x1::other_module; // Not in expression context so no @ needed - use std::vector; // Can use a named address as a namespace item when using other modules - ... -} - -module std::other_module { // Can use a named address as a namespace item to declare a module - ... -} -``` - -## Global Storage Operations - -The primary purpose of `address` values are to interact with the global storage operations. - -`address` values are used with the `exists`, `borrow_global`, `borrow_global_mut`, and `move_from` [operations](./global-storage-operators.md). 
- -The only global storage operation that *does not* use `address` is `move_to`, which uses [`signer`](./signer.md). - -## Ownership - -As with the other scalar values built-in to the language, `address` values are implicitly copyable, meaning they can be copied without an explicit instruction such as [`copy`](./variables.md#move-and-copy). diff --git a/developer-docs-site/docs/move/book/bool.md b/developer-docs-site/docs/move/book/bool.md deleted file mode 100644 index 6423a8c6394c0..0000000000000 --- a/developer-docs-site/docs/move/book/bool.md +++ /dev/null @@ -1,33 +0,0 @@ -# Bool - -`bool` is Move's primitive type for boolean `true` and `false` values. - -## Literals - -Literals for `bool` are either `true` or `false`. - -## Operations - -### Logical - -`bool` supports three logical operations: - -| Syntax | Description | Equivalent Expression | -| ------------------------- | ---------------------------- | ------------------------------------------------------------------- | -| `&&` | short-circuiting logical and | `p && q` is equivalent to `if (p) q else false` | -| || | short-circuiting logical or | p || q is equivalent to `if (p) true else q` | -| `!` | logical negation | `!p` is equivalent to `if (p) false else true` | - -### Control Flow - -`bool` values are used in several of Move's control-flow constructs: - -- [`if (bool) { ... }`](./conditionals.md) -- [`while (bool) { .. }`](./loops.md) -- [`assert!(bool, u64)`](./abort-and-assert.md) - -## Ownership - -As with the other scalar values built-in to the language, boolean values are implicitly copyable, -meaning they can be copied without an explicit instruction such as -[`copy`](./variables.md#move-and-copy). 
diff --git a/developer-docs-site/docs/move/book/coding-conventions.md b/developer-docs-site/docs/move/book/coding-conventions.md deleted file mode 100644 index 654557f22c896..0000000000000 --- a/developer-docs-site/docs/move/book/coding-conventions.md +++ /dev/null @@ -1,77 +0,0 @@ -# Move Coding Conventions - -This section lays out some basic coding conventions for Move that the Move team has found helpful. These are only recommendations, and you should feel free to use other formatting guidelines and conventions if you have a preference for them. - -## Naming - -- **Module names**: should be lower snake case, e.g., `fixed_point32`, `vector`. -- **Type names**: should be camel case if they are not a native type, e.g., `Coin`, `RoleId`. -- **Function names**: should be lower snake case, e.g., `destroy_empty`. -- **Constant names**: should be upper camel case and begin with an `E` if they represent error codes (e.g., `EIndexOutOfBounds`) and upper snake case if they represent a non-error value (e.g., `MIN_STAKE`). -- -- **Generic type names**: should be descriptive, or anti-descriptive where appropriate, e.g., `T` or `Element` for the Vector generic type parameter. Most of the time the "main" type in a module should be the same name as the module e.g., `option::Option`, `fixed_point32::FixedPoint32`. -- **Module file names**: should be the same as the module name e.g., `option.move`. -- **Script file names**: should be lower snake case and should match the name of the “main” function in the script. -- **Mixed file names**: If the file contains multiple modules and/or scripts, the file name should be lower snake case, where the name does not match any particular module/script inside. - -## Imports - -- All module `use` statements should be at the top of the module. -- Functions should be imported and used fully qualified from the module in which they are declared, and not imported at the top level. -- Types should be imported at the top-level. 
Where there are name clashes, `as` should be used to rename the type locally as appropriate. - -For example, if there is a module: - -```move -module 0x1::foo { - struct Foo { } - const CONST_FOO: u64 = 0; - public fun do_foo(): Foo { Foo{} } - ... -} -``` - -this would be imported and used as: - -```move -module 0x1::bar { - use 0x1::foo::{Self, Foo}; - - public fun do_bar(x: u64): Foo { - if (x == 10) { - foo::do_foo() - } else { - abort 0 - } - } - ... -} -``` - -And, if there is a local name-clash when importing two modules: - -```move -module other_foo { - struct Foo {} - ... -} - -module 0x1::importer { - use 0x1::other_foo::Foo as OtherFoo; - use 0x1::foo::Foo; - ... -} -``` - -## Comments - -- Each module, struct, and public function declaration should be commented. -- Move has doc comments `///`, regular single-line comments `//`, block comments `/* */`, and block doc comments `/** */`. - -## Formatting - -The Move team plans to write an autoformatter to enforce formatting conventions. However, in the meantime: - -- Four space indentation should be used except for `script` and `address` blocks whose contents should not be indented. -- Lines should be broken if they are longer than 100 characters. -- Structs and constants should be declared before all functions in a module. diff --git a/developer-docs-site/docs/move/book/conditionals.md b/developer-docs-site/docs/move/book/conditionals.md deleted file mode 100644 index d53b268ac5922..0000000000000 --- a/developer-docs-site/docs/move/book/conditionals.md +++ /dev/null @@ -1,62 +0,0 @@ -# Conditionals - -An `if` expression specifies that some code should only be evaluated if a certain condition is true. For example: - -```move -if (x > 5) x = x - 5 -``` - -The condition must be an expression of type `bool`. - -An `if` expression can optionally include an `else` clause to specify another expression to evaluate when the condition is false. 
- -```move -if (y <= 10) y = y + 1 else y = 10 -``` - -Either the "true" branch or the "false" branch will be evaluated, but not both. Either branch can be a single expression or an expression block. - -The conditional expressions may produce values so that the `if` expression has a result. - -```move -let z = if (x < 100) x else 100; -``` - -The expressions in the true and false branches must have compatible types. For example: - -```move= -// x and y must be u64 integers -let maximum: u64 = if (x > y) x else y; - -// ERROR! branches different types -let z = if (maximum < 10) 10u8 else 100u64; - -// ERROR! branches different types, as default false-branch is () not u64 -if (maximum >= 10) maximum; -``` - -If the `else` clause is not specified, the false branch defaults to the unit value. The following are equivalent: - -```move -if (condition) true_branch // implied default: else () -if (condition) true_branch else () -``` - -Commonly, [`if` expressions](./conditionals.md) are used in conjunction with expression blocks. - -```move -let maximum = if (x > y) x else y; -if (maximum < 10) { - x = x + 10; - y = y + 10; -} else if (x >= 10 && y >= 10) { - x = x - 10; - y = y - 10; -} -``` - -## Grammar for Conditionals - -> *if-expression* → **if (** *expression* **)** *expression* *else-clause**opt* - -> *else-clause* → **else** *expression* diff --git a/developer-docs-site/docs/move/book/constants.md b/developer-docs-site/docs/move/book/constants.md deleted file mode 100644 index c59f0107c57b1..0000000000000 --- a/developer-docs-site/docs/move/book/constants.md +++ /dev/null @@ -1,112 +0,0 @@ -# Constants - -Constants are a way of giving a name to shared, static values inside of a `module` or `script`. - -The constant's must be known at compilation. The constant's value is stored in the compiled module -or script. And each time the constant is used, a new copy of that value is made. 
- -## Declaration - -Constant declarations begin with the `const` keyword, followed by a name, a type, and a value. They -can exist in either a script or module - -```text -const : = ; -``` - -For example - -```move= -script { - - const MY_ERROR_CODE: u64 = 0; - - fun main(input: u64) { - assert!(input > 0, MY_ERROR_CODE); - } - -} - -address 0x42 { -module example { - - const MY_ADDRESS: address = @0x42; - - public fun permissioned(s: &signer) { - assert!(std::signer::address_of(s) == MY_ADDRESS, 0); - } - -} -} -``` - -## Naming - -Constants must start with a capital letter `A` to `Z`. After the first letter, constant names can -contain underscores `_`, letters `a` to `z`, letters `A` to `Z`, or digits `0` to `9`. - -```move -const FLAG: bool = false; -const MY_ERROR_CODE: u64 = 0; -const ADDRESS_42: address = @0x42; -``` - -Even though you can use letters `a` to `z` in a constant. The -[general style guidelines](./coding-conventions.md) are to use just uppercase letters `A` to `Z`, -with underscores `_` between each word. - -This naming restriction of starting with `A` to `Z` is in place to give room for future language -features. It may or may not be removed later. - -## Visibility - -`public` constants are not currently supported. `const` values can be used only in the declaring -module. - -## Valid Expressions - -Currently, constants are limited to the primitive types `bool`, `u8`, `u16`, `u32`, `u64`, `u128`, `u256`, `address`, and -`vector`. Future support for other `vector` values (besides the "string"-style literals) will -come later. - -### Values - -Commonly, `const`s are assigned a simple value, or literal, of their type. 
For example - -```move -const MY_BOOL: bool = false; -const MY_ADDRESS: address = @0x70DD; -const BYTES: vector = b"hello world"; -const HEX_BYTES: vector = x"DEADBEEF"; -``` - -### Complex Expressions - -In addition to literals, constants can include more complex expressions, as long as the compiler is -able to reduce the expression to a value at compile time. - -Currently, equality operations, all boolean operations, all bitwise operations, and all arithmetic -operations can be used. - -```move -const RULE: bool = true && false; -const CAP: u64 = 10 * 100 + 1; -const SHIFTY: u8 = { - (1 << 1) * (1 << 2) * (1 << 3) * (1 << 4) -}; -const HALF_MAX: u128 = 340282366920938463463374607431768211455 / 2; -const REM: u256 = 57896044618658097711785492504343953926634992332820282019728792003956564819968 % 654321; -const EQUAL: bool = 1 == 1; -``` - -If the operation would result in a runtime exception, the compiler will give an error that it is -unable to generate the constant's value - -```move -const DIV_BY_ZERO: u64 = 1 / 0; // error! -const SHIFT_BY_A_LOT: u64 = 1 << 100; // error! -const NEGATIVE_U64: u64 = 0 - 1; // error! -``` - -Note that constants cannot currently refer to other constants. This feature, along with support for -other expressions, will be added in the future. diff --git a/developer-docs-site/docs/move/book/creating-coins.md b/developer-docs-site/docs/move/book/creating-coins.md deleted file mode 100644 index 6922938975d43..0000000000000 --- a/developer-docs-site/docs/move/book/creating-coins.md +++ /dev/null @@ -1,3 +0,0 @@ -# Move Tutorial - -Please refer to the [Move Core Language Tutorial](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/move-tutorial). 
diff --git a/developer-docs-site/docs/move/book/equality.md b/developer-docs-site/docs/move/book/equality.md deleted file mode 100644 index dbabfa66c6677..0000000000000 --- a/developer-docs-site/docs/move/book/equality.md +++ /dev/null @@ -1,164 +0,0 @@ -# Equality - -Move supports two equality operations `==` and `!=` - -## Operations - -| Syntax | Operation | Description | -| ------ | --------- | --------------------------------------------------------------------------- | -| `==` | equal | Returns `true` if the two operands have the same value, `false` otherwise | -| `!=` | not equal | Returns `true` if the two operands have different values, `false` otherwise | - -### Typing - -Both the equal (`==`) and not-equal (`!=`) operations only work if both operands are the same type - -```move -0 == 0; // `true` -1u128 == 2u128; // `false` -b"hello" != x"00"; // `true` -``` - -Equality and non-equality also work over user defined types! - -```move= -address 0x42 { -module example { - struct S has copy, drop { f: u64, s: vector } - - fun always_true(): bool { - let s = S { f: 0, s: b"" }; - // parens are not needed but added for clarity in this example - (copy s) == s - } - - fun always_false(): bool { - let s = S { f: 0, s: b"" }; - // parens are not needed but added for clarity in this example - (copy s) != s - } -} -} -``` - -If the operands have different types, there is a type checking error - -```move -1u8 == 1u128; // ERROR! -// ^^^^^ expected an argument of type 'u8' -b"" != 0; // ERROR! -// ^ expected an argument of type 'vector' -``` - -### Typing with references - -When comparing [references](./references.md), the type of the reference (immutable or mutable) does -not matter. This means that you can compare an immutable `&` reference with a mutable one `&mut` of -the same underlying type. 
- -```move -let i = &0; -let m = &mut 1; - -i == m; // `false` -m == i; // `false` -m == m; // `true` -i == i; // `true` -``` - -The above is equivalent to applying an explicit freeze to each mutable reference where needed - -```move -let i = &0; -let m = &mut 1; - -i == freeze(m); // `false` -freeze(m) == i; // `false` -m == m; // `true` -i == i; // `true` -``` - -But again, the underlying type must be the same type - -```move -let i = &0; -let s = &b""; - -i == s; // ERROR! -// ^ expected an argument of type '&u64' -``` - -## Restrictions - -Both `==` and `!=` consume the value when comparing them. As a result, the type system enforces that -the type must have [`drop`](./abilities.md). Recall that without the -[`drop` ability](./abilities.md), ownership must be transferred by the end of the function, and such -values can only be explicitly destroyed within their declaring module. If these were used directly -with either equality `==` or non-equality `!=`, the value would be destroyed which would break -[`drop` ability](./abilities.md) safety guarantees! - -```move= -address 0x42 { -module example { - struct Coin has store { value: u64 } - fun invalid(c1: Coin, c2: Coin) { - c1 == c2 // ERROR! -// ^^ ^^ These resources would be destroyed! - } -} -} -``` - -But, a programmer can _always_ borrow the value first instead of directly comparing the value, and -reference types have the [`drop` ability](./abilities.md). For example - -```move= -address 0x42 { -module example { - struct Coin as store { value: u64 } - fun swap_if_equal(c1: Coin, c2: Coin): (Coin, Coin) { - let are_equal = &c1 == &c2; // valid - if (are_equal) (c2, c1) else (c1, c2) - } -} -} -``` - -## Avoid Extra Copies - -While a programmer _can_ compare any value whose type has [`drop`](./abilities.md), a programmer -should often compare by reference to avoid expensive copies. 
- -```move= -let v1: vector = function_that_returns_vector(); -let v2: vector = function_that_returns_vector(); -assert!(copy v1 == copy v2, 42); -// ^^^^ ^^^^ -use_two_vectors(v1, v2); - -let s1: Foo = function_that_returns_large_struct(); -let s2: Foo = function_that_returns_large_struct(); -assert!(copy s1 == copy s2, 42); -// ^^^^ ^^^^ -use_two_foos(s1, s2); -``` - -This code is perfectly acceptable (assuming `Foo` has [`drop`](./abilities.md)), just not efficient. -The highlighted copies can be removed and replaced with borrows - -```move= -let v1: vector = function_that_returns_vector(); -let v2: vector = function_that_returns_vector(); -assert!(&v1 == &v2, 42); -// ^ ^ -use_two_vectors(v1, v2); - -let s1: Foo = function_that_returns_large_struct(); -let s2: Foo = function_that_returns_large_struct(); -assert!(&s1 == &s2, 42); -// ^ ^ -use_two_foos(s1, s2); -``` - -The efficiency of the `==` itself remains the same, but the `copy`s are removed and thus the program -is more efficient. diff --git a/developer-docs-site/docs/move/book/friends.md b/developer-docs-site/docs/move/book/friends.md deleted file mode 100644 index 1dab74fe4b873..0000000000000 --- a/developer-docs-site/docs/move/book/friends.md +++ /dev/null @@ -1,131 +0,0 @@ -# Friends - -The `friend` syntax is used to declare modules that are trusted by the current module. -A trusted module is allowed to call any function defined in the current module that have the `public(friend)` visibility. -For details on function visibilities, please refer to the *Visibility* section in [Functions](./functions.md). 
- -## Friend declaration - -A module can declare other modules as friends via friend declaration statements, in the format of - -- `friend ` — friend declaration using fully qualified module name like the example below, or - - ```move - address 0x42 { - module a { - friend 0x42::b; - } - } - ``` - -- `friend ` — friend declaration using a module name alias, where the module alias is introduced via the `use` statement. - - ```move - address 0x42 { - module a { - use 0x42::b; - friend b; - } - } - ``` - -A module may have multiple friend declarations, and the union of all the friend modules forms the friend list. -In the example below, both `0x42::B` and `0x42::C` are considered as friends of `0x42::A`. - -```move -address 0x42 { -module a { - friend 0x42::b; - friend 0x42::c; -} -} -``` - -Unlike `use` statements, `friend` can only be declared in the module scope and not in the expression block scope. -`friend` declarations may be located anywhere a top-level construct (e.g., `use`, `function`, `struct`, etc.) is allowed. -However, for readability, it is advised to place friend declarations near the beginning of the module definition. - -Note that the concept of friendship does not apply to Move scripts: -- A Move script cannot declare `friend` modules as doing so is considered meaningless: there is no mechanism to call the function defined in a script. -- A Move module cannot declare `friend` scripts as well because scripts are ephemeral code snippets that are never published to global storage. - -### Friend declaration rules -Friend declarations are subject to the following rules: - -- A module cannot declare itself as a friend. - - ```move= - address 0x42 { - module m { friend Self; // ERROR! } - // ^^^^ Cannot declare the module itself as a friend - } - - address 0x43 { - module m { friend 0x43::M; // ERROR! 
} - // ^^^^^^^ Cannot declare the module itself as a friend - } - ``` - -- Friend modules must be known by the compiler - - ```move= - address 0x42 { - module m { friend 0x42::nonexistent; // ERROR! } - // ^^^^^^^^^^^^^^^^^ Unbound module '0x42::nonexistent' - } - ``` - -- Friend modules must be within the same account address. (Note: this is not a technical requirement but rather a policy decision which *may* be relaxed later.) - - ```move= - address 0x42 { - module m {} - } - - address 0x43 { - module n { friend 0x42::m; // ERROR! } - // ^^^^^^^ Cannot declare modules out of the current address as a friend - } - ``` - -- Friends relationships cannot create cyclic module dependencies. - - Cycles are not allowed in the friend relationships, e.g., the relation `0x2::a` friends `0x2::b` friends `0x2::c` friends `0x2::a` is not allowed. -More generally, declaring a friend module adds a dependency upon the current module to the friend module (because the purpose is for the friend to call functions in the current module). -If that friend module is already used, either directly or transitively, a cycle of dependencies would be created. - ```move= - address 0x2 { - module a { - use 0x2::c; - friend 0x2::b; - - public fun a() { - c::c() - } - } - - module b { - friend 0x2::c; // ERROR! - // ^^^^^^ This friend relationship creates a dependency cycle: '0x2::b' is a friend of '0x2::a' uses '0x2::c' is a friend of '0x2::b' - } - - module c { - public fun c() {} - } - } - ``` - -- The friend list for a module cannot contain duplicates. - - ```move= - address 0x42 { - module a {} - - module m { - use 0x42::a as aliased_a; - friend 0x42::A; - friend aliased_a; // ERROR! - // ^^^^^^^^^ Duplicate friend declaration '0x42::a'. 
Friend declarations in a module must be unique - } - } - ``` diff --git a/developer-docs-site/docs/move/book/functions.md b/developer-docs-site/docs/move/book/functions.md deleted file mode 100644 index c518c456fc83b..0000000000000 --- a/developer-docs-site/docs/move/book/functions.md +++ /dev/null @@ -1,594 +0,0 @@ -# Functions - -Function syntax in Move is shared between module functions and script functions. Functions inside of modules are reusable, whereas script functions are only used once to invoke a transaction. - -## Declaration - -Functions are declared with the `fun` keyword followed by the function name, type parameters, parameters, a return type, acquires annotations, and finally the function body. - -```text -fun <[type_parameters: constraint],*>([identifier: type],*): -``` - -For example - -```move -fun foo(x: u64, y: T1, z: T2): (T2, T1, u64) { (z, y, x) } -``` - -### Visibility - -Module functions, by default, can only be called within the same module. These internal (sometimes called private) functions cannot be called from other modules or from scripts. - -```move= -address 0x42 { -module m { - fun foo(): u64 { 0 } - fun calls_foo(): u64 { foo() } // valid -} - -module other { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' is internal to '0x42::m' - } -} -} - -script { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' is internal to '0x42::m' - } -} -``` - -To allow access from other modules or from scripts, the function must be declared `public` or `public(friend)`. - -#### `public` visibility - -A `public` function can be called by *any* function defined in *any* module or script. As shown in the following example, a `public` function can be called by: -- other functions defined in the same module, -- functions defined in another module, or -- the function defined in a script. - -There are also no restrictions for what the argument types a public function can take and its return type. 
- -```move= -address 0x42 { -module m { - public fun foo(): u64 { 0 } - fun calls_foo(): u64 { foo() } // valid -} - -module other { - fun calls_m_foo(): u64 { - 0x42::m::foo() // valid - } -} -} - -script { - fun calls_m_foo(): u64 { - 0x42::m::foo() // valid - } -} -``` - -#### `public(friend)` visibility - -The `public(friend)` visibility modifier is a more restricted form of the `public` modifier to give more control about where a function can be used. A `public(friend)` function can be called by: -- other functions defined in the same module, or -- functions defined in modules which are explicitly specified in the **friend list** (see [Friends](./friends.md) on how to specify the friend list). - -Note that since we cannot declare a script to be a friend of a module, the functions defined in scripts can never call a `public(friend)` function. - -```move= -address 0x42 { -module m { - friend 0x42::n; // friend declaration - public(friend) fun foo(): u64 { 0 } - fun calls_foo(): u64 { foo() } // valid -} - -module n { - fun calls_m_foo(): u64 { - 0x42::m::foo() // valid - } -} - -module other { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' can only be called from a 'friend' of module '0x42::m' - } -} -} - -script { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' can only be called from a 'friend' of module '0x42::m' - } -} -``` - -### `entry` modifier - -The `entry` modifier is designed to allow module functions to be safely and directly invoked much like scripts. This allows module writers to specify which functions can be invoked to begin execution. The module writer then knows that any non-`entry` function will be called from a Move program already in execution. - -Essentially, `entry` functions are the "main" functions of a module, and they specify where Move programs start executing. - -Note though, an `entry` function _can_ still be called by other Move functions. 
So while they _can_ serve as the start of a Move program, they aren't restricted to that case. - -For example: - -```move= -address 0x42 { -module m { - public entry fun foo(): u64 { 0 } - fun calls_foo(): u64 { foo() } // valid! -} - -module n { - fun calls_m_foo(): u64 { - 0x42::m::foo() // valid! - } -} - -module other { - public entry fun calls_m_foo(): u64 { - 0x42::m::foo() // valid! - } -} -} - -script { - fun calls_m_foo(): u64 { - 0x42::m::foo() // valid! - } -} -``` - -Even internal functions can be marked as `entry`! This lets you guarantee that the function is called only at the beginning of execution (assuming you do not call it elsewhere in your module) - -```move= -address 0x42 { -module m { - entry fun foo(): u64 { 0 } // valid! entry functions do not have to be public -} - -module n { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' is internal to '0x42::m' - } -} - -module other { - public entry fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' is internal to '0x42::m' - } -} -} - -script { - fun calls_m_foo(): u64 { - 0x42::m::foo() // ERROR! -// ^^^^^^^^^^^^ 'foo' is internal to '0x42::m' - } -} -``` - -Entry functions can take primitive types, String, and vector arguments but cannot take Structs (e.g. Option). They also -must not have any return values. - -### Name - -Function names can start with letters `a` to `z` or letters `A` to `Z`. After the first character, function names can contain underscores `_`, letters `a` to `z`, letters `A` to `Z`, or digits `0` to `9`. - -```move -// all valid -fun FOO() {} -fun bar_42() {} -fun bAZ19() {} - -// invalid -fun _bAZ19() {} // Function names cannot start with '_' -``` - -### Type Parameters - -After the name, functions can have type parameters - -```move -fun id(x: T): T { x } -fun example(x: T1, y: T2): (T1, T1, T2) { (copy x, x, y) } -``` - -For more details, see [Move generics](./generics.md). 
- -### Parameters - -Functions parameters are declared with a local variable name followed by a type annotation - -```move -fun add(x: u64, y: u64): u64 { x + y } -``` - -We read this as `x` has type `u64` - -A function does not have to have any parameters at all. - -```move -fun useless() { } -``` - -This is very common for functions that create new or empty data structures - -```move= -address 0x42 { -module example { - struct Counter { count: u64 } - - fun new_counter(): Counter { - Counter { count: 0 } - } - -} -} -``` - -### Acquires - -When a function accesses a resource using `move_from`, `borrow_global`, or `borrow_global_mut`, the function must indicate that it `acquires` that resource. This is then used by Move's type system to ensure the references into global storage are safe, specifically that there are no dangling references into global storage. - -```move= -address 0x42 { -module example { - - struct Balance has key { value: u64 } - - public fun add_balance(s: &signer, value: u64) { - move_to(s, Balance { value }) - } - - public fun extract_balance(addr: address): u64 acquires Balance { - let Balance { value } = move_from(addr); // acquires needed - value - } -} -} -``` - -`acquires` annotations must also be added for transitive calls within the module. Calls to these functions from another module do not need to annotated with these acquires because one module cannot access resources declared in another module--so the annotation is not needed to ensure reference safety. 
- -```move= -address 0x42 { -module example { - - struct Balance has key { value: u64 } - - public fun add_balance(s: &signer, value: u64) { - move_to(s, Balance { value }) - } - - public fun extract_balance(addr: address): u64 acquires Balance { - let Balance { value } = move_from(addr); // acquires needed - value - } - - public fun extract_and_add(sender: address, receiver: &signer) acquires Balance { - let value = extract_balance(sender); // acquires needed here - add_balance(receiver, value) - } -} -} - -address 0x42 { -module other { - fun extract_balance(addr: address): u64 { - 0x42::example::extract_balance(addr) // no acquires needed - } -} -} -``` - -A function can `acquire` as many resources as it needs to - -```move= -address 0x42 { -module example { - use std::vector; - - struct Balance has key { value: u64 } - struct Box has key { items: vector } - - public fun store_two( - addr: address, - item1: Item1, - item2: Item2, - ) acquires Balance, Box { - let balance = borrow_global_mut(addr); // acquires needed - balance.value = balance.value - 2; - let box1 = borrow_global_mut>(addr); // acquires needed - vector::push_back(&mut box1.items, item1); - let box2 = borrow_global_mut>(addr); // acquires needed - vector::push_back(&mut box2.items, item2); - } -} -} -``` - -### Return type - -After the parameters, a function specifies its return type. - -```move -fun zero(): u64 { 0 } -``` - -Here `: u64` indicates that the function's return type is `u64`. - -:::tip -A function can return an immutable `&` or mutable `&mut` [reference](./references.md) if derived from an input reference. Keep in mind, this means that a function [cannot return a reference to global storage](./references.md#references-cannot-be-stored) unless it is an [inline function](#inline-functions). 
-::: - -Using tuples, a function can return multiple values: - -```move -fun one_two_three(): (u64, u64, u64) { (0, 1, 2) } -``` - -If no return type is specified, the function has an implicit return type of unit `()`. These functions are equivalent: - -```move -fun just_unit(): () { () } -fun just_unit() { () } -fun just_unit() { } -``` - -`script` functions must have a return type of unit `()`: - -```move -script { - fun do_nothing() { - } -} -``` - -As mentioned in the [tuples section](./tuples.md), these tuple "values" are virtual and do not exist at runtime. So for a function that returns unit `()`, it will not be returning any value at all during execution. - -### Function body - -A function's body is an expression block. The return value of the function is the last value in the sequence - -```move= -fun example(): u64 { - let x = 0; - x = x + 1; - x // returns 'x' -} -``` - -See [the section below for more information on returns](#returning-values) - -For more information on expression blocks, see [Move variables](./variables.md). - -### Native Functions - -Some functions do not have a body specified, and instead have the body provided by the VM. These functions are marked `native`. - -Without modifying the VM source code, a programmer cannot add new native functions. Furthermore, it is the intent that `native` functions are used for either standard library code or for functionality needed for the given Move environment. - -Most `native` functions you will likely see are in standard library code such as `vector` - -```move= -module std::vector { - native public fun empty(): vector; - ... 
-} -``` - -## Calling - -When calling a function, the name can be specified either through an alias or fully qualified - -```move= -address 0x42 { -module example { - public fun zero(): u64 { 0 } -} -} - -script { - use 0x42::example::{Self, zero}; - fun call_zero() { - // With the `use` above all of these calls are equivalent - 0x42::example::zero(); - example::zero(); - zero(); - } -} -``` - -When calling a function, an argument must be given for every parameter. - -```move= -address 0x42 { -module example { - public fun takes_none(): u64 { 0 } - public fun takes_one(x: u64): u64 { x } - public fun takes_two(x: u64, y: u64): u64 { x + y } - public fun takes_three(x: u64, y: u64, z: u64): u64 { x + y + z } -} -} - -script { - use 0x42::example; - fun call_all() { - example::takes_none(); - example::takes_one(0); - example::takes_two(0, 1); - example::takes_three(0, 1, 2); - } -} -``` - -Type arguments can be either specified or inferred. Both calls are equivalent. - -```move= -address 0x42 { -module example { - public fun id(x: T): T { x } -} -} - -script { - use 0x42::example; - fun call_all() { - example::id(0); - example::id(0); - } -} -``` - -For more details, see [Move generics](./generics.md). - - -## Returning values - -The result of a function, its "return value", is the final value of its function body. For example - -```move= -fun add(x: u64, y: u64): u64 { - x + y -} -``` - -[As mentioned above](#function-body), the function's body is an [expression block](./variables.md). The expression block can be a sequence of various statements, and the final expression in the block will be the value of that block. - -```move= -fun double_and_add(x: u64, y: u64): u64 { - let double_x = x * 2; - let double_y = y * 2; - double_x + double_y -} -``` - -The return value here is `double_x + double_y` - -### `return` expression - -A function implicitly returns the value that its body evaluates to. 
However, functions can also use the explicit `return` expression: - -```move -fun f1(): u64 { return 0 } -fun f2(): u64 { 0 } -``` - -These two functions are equivalent. In this slightly more involved example, the function subtracts two `u64` values, but returns early with `0` if the second value is too large: - -```move= -fun safe_sub(x: u64, y: u64): u64 { - if (y > x) return 0; - x - y -} -``` - -Note that the body of this function could also have been written as `if (y > x) 0 else x - y`. - -However where `return` really shines is in exiting deep within other control flow constructs. In this example, the function iterates through a vector to find the index of a given value: - -```move= -use std::vector; -use std::option::{Self, Option}; -fun index_of(v: &vector, target: &T): Option { - let i = 0; - let n = vector::length(v); - while (i < n) { - if (vector::borrow(v, i) == target) return option::some(i); - i = i + 1 - }; - - option::none() -} -``` - -Using `return` without an argument is shorthand for `return ()`. That is, the following two functions are equivalent: - -```move -fun foo() { return } -fun foo() { return () } -``` - -## Inline Functions - -Inline functions are functions whose bodies are expanded in place at the caller location during compile time. -Thus, inline functions do not appear in Move bytecode as a separate functions: all calls to them are expanded away by the compiler. -In certain circumstances, they may lead to faster execution and save gas. -However, users should be aware that they could lead to larger bytecode size: excessive inlining potentially triggers various size restrictions. - -One can define an inline function by adding the `inline` keyword to a function declaration as shown below: - -```move= -inline fun percent(x: u64, y: u64):u64 { x * 100 / y } -``` - -If we call this inline function as `percent(2, 200)`, the compiler will replace this call with the inline function's body, as if the user has written `2 * 100 / 200`. 
- -### Function parameters and lambda expressions - -Inline functions support _function parameters_, which accept lambda expressions (i.e., anonymous functions) as arguments. -This feature allows writing several common programming patterns elegantly. -Similar to inline functions, lambda expressions are also expanded at call site. - -A lambda expression includes a list of parameter names (enclosed within `||`) followed by the body. -Some simple examples are: `|x| x + 1`, `|x, y| x + y`, `|| 1`, `|| { 1 }`. -A lambda's body can refer to variables available in the scope where the lambda is defined: this is also known as capturing. -Such variables can be read or written (if mutable) by the lambda expression. - -The type of a function parameter is written as `|| `. -For example, when the function parameter type is `|u64, u64| bool`, any lambda expression that takes two `u64` parameters and returns a `bool` value can be provided as the argument. - -Below is an example that showcases many of these concepts in action (this example is taken from the `std::vector` module): - -```move= -/// Fold the function over the elements. -/// E.g, `fold(vector[1,2,3], 0, f)` is the same as `f(f(f(0, 1), 2), 3)`. -public inline fun fold( - v: vector, - init: Accumulator, - f: |Accumulator,Element|Accumulator -): Accumulator { - let accu = init; - // Note: `for_each` is an inline function, but is not shown here. - for_each(v, |elem| accu = f(accu, elem)); - accu -} -``` - -The type signature of the elided public inline function `for_each` is `fun for_each(v: vector, f: |Element|)`. -Its second parameter `f` is a function parameter which accepts any lambda expression that consumes an `Element` and returns nothing. -In the code example, we use the lambda expression `|elem| accu = f(accu, elem)` as an argument to this function parameter. -Note that this lambda expression captures the variable `accu` from the outer scope. 
- -### Current restrictions - -There are plans to loosen some of these restrictions in the future, but for now, - -- Only inline functions can have function parameters. -- Only explicit lambda expressions can be passed as an argument to an inline function's function parameters. -- Inline functions and lambda expressions cannot have `return`, `break`, or `continue` expressions. -- Inline functions or lambda expressions cannot return lambda expressions. -- Cyclic recursion involving only inline functions is not allowed. -- Parameters in lambda expressions must not be type annotated (e.g., `|x: u64| x + 1` is not allowed): their types are inferred. - -### Additional considerations - -- Avoid using module-private constants/methods in public inline functions. - When such inline functions are called outside of that module, an in-place expansion at call site leads to invalid access of the private constants/methods. -- Avoid marking large functions that are called at different locations as inline. Also avoid inline functions calling lots of other inline functions transitively. - These may lead to excessive inlining and increase the bytecode size. -- Inline functions can be useful for returning references to global storage, which non-inline functions cannot do. diff --git a/developer-docs-site/docs/move/book/generics.md b/developer-docs-site/docs/move/book/generics.md deleted file mode 100644 index 1e5364cd4a3e7..0000000000000 --- a/developer-docs-site/docs/move/book/generics.md +++ /dev/null @@ -1,482 +0,0 @@ -# Generics - -Generics can be used to define functions and structs over different input data types. This language feature is sometimes referred to as *parametric polymorphism*. In Move, we will often use the term generics interchangeably with type parameters and type arguments. - -Generics are commonly used in library code, such as in vector, to declare code that works over any possible instantiation (that satisfies the specified constraints). 
In other frameworks, generic code can sometimes be used to interact with global storage many different ways that all still share the same implementation. - -## Declaring Type Parameters - -Both functions and structs can take a list of type parameters in their signatures, enclosed by a pair of angle brackets `<...>`. - -### Generic Functions - -Type parameters for functions are placed after the function name and before the (value) parameter list. The following code defines a generic identity function that takes a value of any type and returns that value unchanged. - -```move -fun id(x: T): T { - // this type annotation is unnecessary but valid - (x: T) -} -``` - -Once defined, the type parameter `T` can be used in parameter types, return types, and inside the function body. - -### Generic Structs - -Type parameters for structs are placed after the struct name, and can be used to name the types of the fields. - -```move -struct Foo has copy, drop { x: T } - -struct Bar has copy, drop { - x: T1, - y: vector, -} -``` - -Note that [type parameters do not have to be used](#unused-type-parameters) - -## Type Arguments - -### Calling Generic Functions - -When calling a generic function, one can specify the type arguments for the function's type parameters in a list enclosed by a pair of angle brackets. - -```move -fun foo() { - let x = id(true); -} -``` - -If you do not specify the type arguments, Move's [type inference](#type-inference) will supply them for you. - -### Using Generic Structs - -Similarly, one can attach a list of type arguments for the struct's type parameters when constructing or destructing values of generic types. - -```move -fun foo() { - let foo = Foo { x: true }; - let Foo { x } = foo; -} -``` - -If you do not specify the type arguments, Move's [type inference](#type-inference) will supply them for you. 
- -### Type Argument Mismatch - -If you specify the type arguments and they conflict with the actual values supplied, an error will be given: - -```move -fun foo() { - let x = id(true); // error! true is not a u64 -} -``` - -and similarly: - -```move -fun foo() { - let foo = Foo { x: 0 }; // error! 0 is not a bool - let Foo
{ x } = foo; // error! bool is incompatible with address -} -``` - -## Type Inference - -In most cases, the Move compiler will be able to infer the type arguments so you don't have to write them down explicitly. Here's what the examples above would look like if we omit the type arguments: - -```move -fun foo() { - let x = id(true); - // ^ is inferred - - let foo = Foo { x: true }; - // ^ is inferred - - let Foo { x } = foo; - // ^ is inferred -} -``` - -Note: when the compiler is unable to infer the types, you'll need annotate them manually. A common scenario is to call a function with type parameters appearing only at return positions. - -```move -address 0x2 { -module m { - using std::vector; - - fun foo() { - // let v = vector::new(); - // ^ The compiler cannot figure out the element type. - - let v = vector::new(); - // ^~~~~ Must annotate manually. - } -} -} -``` - -However, the compiler will be able to infer the type if that return value is used later in that function: - -```move -address 0x2 { -module m { - using std::vector; - - fun foo() { - let v = vector::new(); - // ^ is inferred - vector::push_back(&mut v, 42); - } -} -} -``` - -## Unused Type Parameters - -For a struct definition, -an unused type parameter is one that -does not appear in any field defined in the struct, -but is checked statically at compile time. -Move allows unused type parameters so the following struct definition is valid: - -```move -struct Foo { - foo: u64 -} -``` - -This can be convenient when modeling certain concepts. Here is an example: - -```move -address 0x2 { -module m { - // Currency Specifiers - struct Currency1 {} - struct Currency2 {} - - // A generic coin type that can be instantiated using a currency - // specifier type. - // e.g. Coin, Coin etc. 
- struct Coin has store { - value: u64 - } - - // Write code generically about all currencies - public fun mint_generic(value: u64): Coin { - Coin { value } - } - - // Write code concretely about one currency - public fun mint_concrete(value: u64): Coin { - Coin { value } - } -} -} -``` - -In this example, -`struct Coin` is generic on the `Currency` type parameter, -which specifies the currency of the coin and -allows code to be written either -generically on any currency or -concretely on a specific currency. -This genericity applies even when the `Currency` type parameter -does not appear in any of the fields defined in `Coin`. - -### Phantom Type Parameters - -In the example above, -although `struct Coin` asks for the `store` ability, -neither `Coin` nor `Coin` will have the `store` ability. -This is because of the rules for -[Conditional Abilities and Generic Types](./abilities.md#conditional-abilities-and-generic-types) -and the fact that `Currency1` and `Currency2` don't have the `store` ability, -despite the fact that they are not even used in the body of `struct Coin`. -This might cause some unpleasant consequences. -For example, we are unable to put `Coin` into a wallet in the global storage. - -One possible solution would be to -add spurious ability annotations to `Currency1` and `Currency2` -(i.e., `struct Currency1 has store {}`). -But, this might lead to bugs or security vulnerabilities -because it weakens the types with unnecessary ability declarations. -For example, we would never expect a resource in the global storage to have a field in type `Currency1`, -but this would be possible with the spurious `store` ability. -Moreover, the spurious annotations would be infectious, -requiring many functions generic on the unused type parameter to also include the necessary constraints. - -Phantom type parameters solve this problem. 
-Unused type parameters can be marked as *phantom* type parameters, -which do not participate in the ability derivation for structs. -In this way, -arguments to phantom type parameters are not considered when deriving the abilities for generic types, -thus avoiding the need for spurious ability annotations. -For this relaxed rule to be sound, -Move's type system guarantees that a parameter declared as `phantom` is either -not used at all in the struct definition, or -it is only used as an argument to type parameters also declared as `phantom`. - -#### Declaration - -In a struct definition -a type parameter can be declared as phantom by adding the `phantom` keyword before its declaration. -If a type parameter is declared as phantom we say it is a phantom type parameter. -When defining a struct, Move's type checker ensures that every phantom type parameter is either -not used inside the struct definition or -it is only used as an argument to a phantom type parameter. - -More formally, -if a type is used as an argument to a phantom type parameter -we say the type appears in _phantom position_. -With this definition in place, -the rule for the correct use of phantom parameters can be specified as follows: -**A phantom type parameter can only appear in phantom position**. - -The following two examples show valid uses of phantom parameters. -In the first one, -the parameter `T1` is not used at all inside the struct definition. -In the second one, the parameter `T1` is only used as an argument to a phantom type parameter. 
- -```move -struct S1 { f: u64 } - ^^ - Ok: T1 does not appear inside the struct definition - - -struct S2 { f: S1 } - ^^ - Ok: T1 appears in phantom position -``` - -The following code shows examples of violations of the rule: - -```move -struct S1 { f: T } - ^ - Error: Not a phantom position - -struct S2 { f: T } - -struct S3 { f: S2 } - ^ - Error: Not a phantom position -``` - -#### Instantiation - -When instantiating a struct, -the arguments to phantom parameters are excluded when deriving the struct abilities. -For example, consider the following code: - -```move -struct S has copy { f: T1 } -struct NoCopy {} -struct HasCopy has copy {} -``` - -Consider now the type `S`. -Since `S` is defined with `copy` and all non-phantom arguments have `copy` -then `S` also has `copy`. - -#### Phantom Type Parameters with Ability Constraints - -Ability constraints and phantom type parameters are orthogonal features in the sense that -phantom parameters can be declared with ability constraints. -When instantiating a phantom type parameter with an ability constraint, -the type argument has to satisfy that constraint, -even though the parameter is phantom. -For example, the following definition is perfectly valid: - -```move -struct S {} -``` - -The usual restrictions apply and `T` can only be instantiated with arguments having `copy`. - -## Constraints - -In the examples above, we have demonstrated how one can use type parameters to define "unknown" types that can be plugged in by callers at a later time. This however means the type system has little information about the type and has to perform checks in a very conservative way. In some sense, the type system must assume the worst case scenario for an unconstrained generic. Simply put, by default generic type parameters have no [abilities](./abilities.md). 
- -This is where constraints come into play: they offer a way to specify what properties these unknown types have so the type system can allow operations that would otherwise be unsafe. - -### Declaring Constraints - -Constraints can be imposed on type parameters using the following syntax. - -```move -// T is the name of the type parameter -T: (+ )* -``` - -The `` can be any of the four [abilities](./abilities.md), and a type parameter can be constrained with multiple abilities at once. So all of the following would be valid type parameter declarations: - -```move -T: copy -T: copy + drop -T: copy + drop + store + key -``` - -### Verifying Constraints - -Constraints are checked at call sites so the following code won't compile. - -```move -struct Foo { x: T } - -struct Bar { x: Foo } -// ^ error! u8 does not have 'key' - -struct Baz { x: Foo } -// ^ error! T does not have 'key' -``` - -```move -struct R {} - -fun unsafe_consume(x: T) { - // error! x does not have 'drop' -} - -fun consume(x: T) { - // valid! - // x will be dropped automatically -} - -fun foo() { - let r = R {}; - consume(r); - // ^ error! R does not have 'drop' -} -``` - -```move -struct R {} - -fun unsafe_double(x: T) { - (copy x, x) - // error! x does not have 'copy' -} - -fun double(x: T) { - (copy x, x) // valid! -} - -fun foo(): (R, R) { - let r = R {}; - double(r) - // ^ error! R does not have 'copy' -} -``` - -For more information, see the abilities section on [conditional abilities and generic types](./abilities.md#conditional-abilities-and-generic-types). - -## Limitations on Recursions - -### Recursive Structs - -Generic structs can not contain fields of the same type, either directly or indirectly, even with different type arguments. All of the following struct definitions are invalid: - -```move -struct Foo { - x: Foo // error! 'Foo' containing 'Foo' -} - -struct Bar { - x: Bar // error! 'Bar' containing 'Bar' -} - -// error! 'A' and 'B' forming a cycle, which is not allowed either. 
-struct A { - x: B -} - -struct B { - x: A - y: A -} -``` - -### Advanced Topic: Type-level Recursions - -Move allows generic functions to be called recursively. However, when used in combination with generic structs, this could create an infinite number of types in certain cases, and allowing this means adding unnecessary complexity to the compiler, vm and other language components. Therefore, such recursions are forbidden. - -Allowed: - -```move -address 0x2 { -module m { - struct A {} - - // Finitely many types -- allowed. - // foo -> foo -> foo -> ... is valid - fun foo() { - foo(); - } - - // Finitely many types -- allowed. - // foo -> foo> -> foo> -> ... is valid - fun foo() { - foo>(); - } -} -} -``` - -Not allowed: - -```move -address 0x2 { -module m { - struct A {} - - // Infinitely many types -- NOT allowed. - // error! - // foo -> foo> -> foo>> -> ... - fun foo() { - foo>(); - } -} -} -``` - -```move -address 0x2 { -module n { - struct A {} - - // Infinitely many types -- NOT allowed. - // error! - // foo -> bar -> foo> - // -> bar, T2> -> foo, A> - // -> bar, A> -> foo, A>> - // -> ... - fun foo() { - bar(); - } - - fun bar { - foo>(); - } -} -} -``` - -Note, the check for type level recursions is based on a conservative analysis on the call sites and does NOT take control flow or runtime values into account. - -```move -address 0x2 { -module m { - struct A {} - - fun foo(n: u64) { - if (n > 0) { - foo>(n - 1); - }; - } -} -} -``` - -The function in the example above will technically terminate for any given input and therefore only creating finitely many types, but it is still considered invalid by Move's type system. 
diff --git a/developer-docs-site/docs/move/book/global-storage-operators.md b/developer-docs-site/docs/move/book/global-storage-operators.md deleted file mode 100644 index 19fd9d4f92647..0000000000000 --- a/developer-docs-site/docs/move/book/global-storage-operators.md +++ /dev/null @@ -1,250 +0,0 @@ -# Global Storage - Operators - -Move programs can create, delete, and update [resources](./structs-and-resources.md) in global storage using the following five instructions: - - -| Operation | Description | Aborts? | ----------------------------------------- |---------------------------------------------------------------- |---------------------------------------- | -|`move_to(&signer,T)` | Publish `T` under `signer.address` | If `signer.address` already holds a `T` | -|`move_from(address): T` | Remove `T` from `address` and return it | If `address` does not hold a `T` | -|`borrow_global_mut(address): &mut T` | Return a mutable reference to the `T` stored under `address` | If `address` does not hold a `T` | -|`borrow_global(address): &T` | Return an immutable reference to the `T` stored under `address` | If `address` does not hold a `T` | -|`exists(address): bool` | Return `true` if a `T` is stored under `address` | Never | - - -Each of these instructions is parameterized by a type `T` with the [`key` ability](./abilities.md). However, each type `T` *must be declared in the current module*. This ensures that a resource can only be manipulated via the API exposed by its defining module. The instructions also take either an [`address`](./address.md) or [`&signer`](./signer.md) representing the account address where the resource of type `T` is stored. - -## References to resources - -References to global resources returned by `borrow_global` or `borrow_global_mut` mostly behave like references to local storage: they can be extended, read, and written using ordinary [reference operators](./references.md) and passed as arguments to other function. 
However, there is one important difference between local and global references: **a function cannot return a reference that points into global storage**. For example, these two functions will each fail to compile: - -```move -struct R has key { f: u64 } -// will not compile -fun ret_direct_resource_ref_bad(a: address): &R { - borrow_global(a) // error! -} -// also will not compile -fun ret_resource_field_ref_bad(a: address): &u64 { - &borrow_global(a).f // error! -} -``` - -Move must enforce this restriction to guarantee absence of dangling references to global storage. [This](#reference-safety-for-global-resources) section contains much more detail for the interested reader. - -## Global storage operators with generics - -Global storage operations can be applied to generic resources with both instantiated and uninstantiated generic type parameters: - -```move -struct Container has key { t: T } - -// Publish a Container storing a type T of the caller's choosing -fun publish_generic_container(account: &signer, t: T) { - move_to>(account, Container { t }) -} - -/// Publish a container storing a u64 -fun publish_instantiated_generic_container(account: &signer, t: u64) { - move_to>(account, Container { t }) -} -``` - -The ability to index into global storage via a type parameter chosen at runtime is a powerful Move feature known as *storage polymorphism*. For more on the design patterns enabled by this feature, see [Move generics](./generics.md). - -## Example: `Counter` - -The simple `Counter` module below exercises each of the five global storage operators. 
The API exposed by this module allows: - -- Anyone to publish a `Counter` resource under their account -- Anyone to check if a `Counter` exists under any address -- Anyone to read or increment the value of a `Counter` resource under any address -- An account that stores a `Counter` resource to reset it to zero -- An account that stores a `Counter` resource to remove and delete it - -```move -address 0x42 { -module counter { - use std::signer; - - /// Resource that wraps an integer counter - struct Counter has key { i: u64 } - - /// Publish a `Counter` resource with value `i` under the given `account` - public fun publish(account: &signer, i: u64) { - // "Pack" (create) a Counter resource. This is a privileged operation that - // can only be done inside the module that declares the `Counter` resource - move_to(account, Counter { i }) - } - - /// Read the value in the `Counter` resource stored at `addr` - public fun get_count(addr: address): u64 acquires Counter { - borrow_global(addr).i - } - - /// Increment the value of `addr`'s `Counter` resource - public fun increment(addr: address) acquires Counter { - let c_ref = &mut borrow_global_mut(addr).i; - *c_ref = *c_ref + 1 - } - - /// Reset the value of `account`'s `Counter` to 0 - public fun reset(account: &signer) acquires Counter { - let c_ref = &mut borrow_global_mut(signer::address_of(account)).i; - *c_ref = 0 - } - - /// Delete the `Counter` resource under `account` and return its value - public fun delete(account: &signer): u64 acquires Counter { - // remove the Counter resource - let c = move_from(signer::address_of(account)); - // "Unpack" the `Counter` resource into its fields. 
This is a - // privileged operation that can only be done inside the module - // that declares the `Counter` resource - let Counter { i } = c; - i - } - - /// Return `true` if `addr` contains a `Counter` resource - public fun exists(addr: address): bool { - exists(addr) - } -} -} -``` - -## Annotating functions with `acquires` - -In the `counter` example, you might have noticed that the `get_count`, `increment`, `reset`, and `delete` functions are annotated with `acquires Counter`. A Move function `m::f` must be annotated with `acquires T` if and only if: - -- The body of `m::f` contains a `move_from`, `borrow_global_mut`, or `borrow_global` instruction, or -- The body of `m::f` invokes a function `m::g` declared in the same module that is annotated with `acquires` - -For example, the following function inside `Counter` would need an `acquires` annotation: - -```move -// Needs `acquires` because `increment` is annotated with `acquires` -fun call_increment(addr: address): u64 acquires Counter { - counter::increment(addr) -} -``` - -However, the same function *outside* `Counter` would not need an annotation: - -```move -address 0x43 { -module m { - use 0x42::counter; - - // Ok. 
Only need annotation when resource acquired by callee is declared - // in the same module - fun call_increment(addr: address): u64 { - counter::increment(addr) - } -} -} -``` - -If a function touches multiple resources, it needs multiple `acquires`: - -```move= -address 0x42 { -module two_resources { - struct R1 has key { f: u64 } - struct R2 has key { g: u64 } - - fun double_acquires(a: address): u64 acquires R1, R2 { - borrow_global<R1>(a).f + borrow_global<R2>(a).g - } -} -} -``` - -The `acquires` annotation does not take generic type parameters into account: - -```move= -address 0x42 { -module m { - struct R<T> has key { t: T } - - // `acquires R`, not `acquires R<T>` - fun acquire_generic_resource<T: store>(a: address) acquires R { - let _ = borrow_global<R<T>>(a); - } - - // `acquires R`, not `acquires R<u32>` - fun acquire_instantiated_generic_resource(a: address) acquires R { - let _ = borrow_global<R<u32>>(a); - } -} -} -``` - -Finally: redundant `acquires` are not allowed. Adding this function inside `Counter` will result in a compilation error: - -```move -// This code will not compile because the body of the function does not use a global -// storage instruction or invoke a function with `acquires` -fun redundant_acquires_bad() acquires Counter {} -``` - -For more information on `acquires`, see [Move functions](./functions.md). - -## Reference Safety For Global Resources - -Move prohibits returning global references and requires the `acquires` annotation to prevent dangling references. This allows Move to live up to its promise of static reference safety (i.e., no dangling references, no `null` or `nil` dereferences) for all [reference](./references.md) types. 
- -This example illustrates how the Move type system uses `acquires` to prevent a dangling reference: - -```move= -address 0x42 { -module dangling { - struct T has key { f: u64 } - - fun borrow_then_remove_bad(a: address) acquires T { - let t_ref: &mut T = borrow_global_mut<T>(a); - let t = remove_t(a); // type system complains here - // t_ref now dangling! - let uh_oh = *&t_ref.f - } - - fun remove_t(a: address): T acquires T { - move_from<T>(a) - } - -} -} -``` - -In this code, line 6 acquires a reference to the `T` stored at address `a` in global storage. The callee `remove_t` then removes the value, which makes `t_ref` a dangling reference. - -Fortunately, this cannot happen because the type system will reject this program. The `acquires` annotation on `remove_t` lets the type system know that line 7 is dangerous, without having to recheck or introspect the body of `remove_t` separately! - -The restriction on returning global references prevents a similar, but even more insidious problem: - -```move= -address 0x42 { -module m1 { - struct T has key {} - - public fun ret_t_ref(a: address): &T acquires T { - borrow_global<T>(a) // error! type system complains here - } - - public fun remove_t(a: address) acquires T { - let T {} = move_from<T>(a); - } -} - -module m2 { - fun borrow_then_remove_bad(a: address) { - let t_ref = m1::ret_t_ref(a); - let t = m1::remove_t(a); // t_ref now dangling! - } -} -} -``` - -Line 16 acquires a reference to a global resource `m1::T`, then line 17 removes that same resource, which makes `t_ref` dangle. In this case, `acquires` annotations do not help us because the `borrow_then_remove_bad` function is outside of the `m1` module that declares `T` (recall that `acquires` annotations can only be used for resources declared in the current module). Instead, the type system avoids this problem by preventing the return of a global reference at line 6. 
- -Fancier type systems that would allow returning global references without sacrificing reference safety are possible, and we may consider them in future iterations of Move. We chose the current design because it strikes a good balance between expressivity, annotation burden, and type system complexity. diff --git a/developer-docs-site/docs/move/book/global-storage-structure.md b/developer-docs-site/docs/move/book/global-storage-structure.md deleted file mode 100644 index dc624e60c9bb8..0000000000000 --- a/developer-docs-site/docs/move/book/global-storage-structure.md +++ /dev/null @@ -1,14 +0,0 @@ -# Global Storage - Structure - -The purpose of Move programs is to [read from and write to](./global-storage-operators.md) tree-shaped persistent global storage. Programs cannot access the filesystem, network, or any other data outside of this tree. - -In pseudocode, the global storage looks something like: - -```move -struct GlobalStorage { - resources: Map<(address, ResourceType), ResourceValue> - modules: Map<(address, ModuleName), ModuleBytecode> -} -``` - -Structurally, global storage is a [forest](https://en.wikipedia.org/wiki/Tree_(graph_theory)) consisting of trees rooted at an account [`address`](./address.md). Each address can store both [resource](./structs-and-resources.md) data values and [module](./modules-and-scripts.md) code values. As the pseudocode above indicates, each `address` can store at most one resource value of a given type and at most one module with a given name. diff --git a/developer-docs-site/docs/move/book/integers.md b/developer-docs-site/docs/move/book/integers.md deleted file mode 100644 index a92f91994442a..0000000000000 --- a/developer-docs-site/docs/move/book/integers.md +++ /dev/null @@ -1,153 +0,0 @@ -# Integers - -Move supports six unsigned integer types: `u8`, `u16`, `u32`, `u64`, `u128`, and `u256`. Values of these types range from 0 to a maximum that depends on the size of the type. 
- -| Type | Value Range | -| -------------------------------- | ------------------------ | -| Unsigned 8-bit integer, `u8` | 0 to 2^8 - 1 | -| Unsigned 16-bit integer, `u16` | 0 to 2^16 - 1 | -| Unsigned 32-bit integer, `u32` | 0 to 2^32 - 1 | -| Unsigned 64-bit integer, `u64` | 0 to 2^64 - 1 | -| Unsigned 128-bit integer, `u128` | 0 to 2^128 - 1 | -| Unsigned 256-bit integer, `u256` | 0 to 2^256 - 1 | - -## Literals - -Literal values for these types are specified either as a sequence of digits (e.g.,`112`) or as hex literals, e.g., `0xFF`. The type of the literal can optionally be added as a suffix, e.g., `112u8`. If the type is not specified, the compiler will try to infer the type from the context where the literal is used. If the type cannot be inferred, it is assumed to be `u64`. - -Number literals can be separated by underscores for grouping and readability. (e.g.,`1_234_5678`, `1_000u128`, `0xAB_CD_12_35`). - -If a literal is too large for its specified (or inferred) size range, an error is reported. 
- -### Examples - -```move -// literals with explicit annotations; -let explicit_u8 = 1u8; -let explicit_u16 = 1u16; -let explicit_u32 = 1u32; -let explicit_u64 = 2u64; -let explicit_u128 = 3u128; -let explicit_u256 = 1u256; -let explicit_u64_underscored = 154_322_973u64; - -// literals with simple inference -let simple_u8: u8 = 1; -let simple_u16: u16 = 1; -let simple_u32: u32 = 1; -let simple_u64: u64 = 2; -let simple_u128: u128 = 3; -let simple_u256: u256 = 1; - -// literals with more complex inference -let complex_u8 = 1; // inferred: u8 -// right hand argument to shift must be u8 -let _unused = 10 << complex_u8; - -let x: u8 = 38; -let complex_u8 = 2; // inferred: u8 -// arguments to `+` must have the same type -let _unused = x + complex_u8; - -let complex_u128 = 133_876; // inferred: u128 -// inferred from function argument type -function_that_takes_u128(complex_u128); - -// literals can be written in hex -let hex_u8: u8 = 0x1; -let hex_u16: u16 = 0x1BAE; -let hex_u32: u32 = 0xDEAD80; -let hex_u64: u64 = 0xCAFE; -let hex_u128: u128 = 0xDEADBEEF; -let hex_u256: u256 = 0x1123_456A_BCDE_F; -``` - -## Operations - -### Arithmetic - -Each of these types supports the same set of checked arithmetic operations. For all of these operations, both arguments (the left and right side operands) *must* be of the same type. If you need to operate over values of different types, you will need to first perform a [cast](#casting). Similarly, if you expect the result of the operation to be too large for the integer type, perform a [cast](#casting) to a larger size before performing the operation. - -All arithmetic operations abort instead of behaving in a way that mathematical integers would not (e.g., overflow, underflow, divide-by-zero). 
- -| Syntax | Operation | Aborts If -|--------|-----------|------------------------------------- -| `+` | addition | Result is too large for the integer type -| `-` | subtraction | Result is less than zero -| `*` | multiplication | Result is too large for the integer type -| `%` | modular division | The divisor is `0` -| `/` | truncating division | The divisor is `0` - -### Bitwise - -The integer types support the following bitwise operations that treat each number as a series of individual bits, either 0 or 1, instead of as numerical integer values. - -Bitwise operations do not abort. - -| Syntax | Operation | Description | -|---------------------|-------------|-------------------------------------------------------| -| `&` | bitwise and | Performs a boolean and for each bit pairwise | -| `\|` | bitwise or | Performs a boolean or for each bit pairwise | -| `^` | bitwise xor | Performs a boolean exclusive or for each bit pairwise | - -### Bit Shifts - -Similar to the bitwise operations, each integer type supports bit shifts. But unlike the other operations, the righthand side operand (how many bits to shift by) must *always* be a `u8` and need not match the left side operand (the number you are shifting). - -Bit shifts can abort if the number of bits to shift by is greater than or equal to `8`, `16`, `32`, `64`, `128` or `256` for `u8`, `u16`, `u32`, `u64`, `u128` and `u256` respectively. - -| Syntax | Operation | Aborts if -|--------|------------|---------- -|`<<` | shift left | Number of bits to shift by is greater than or equal to the size of the integer type -|`>>` | shift right| Number of bits to shift by is greater than or equal to the size of the integer type - -### Comparisons - -Integer types are the *only* types in Move that can use the comparison operators. Both arguments need to be of the same type. If you need to compare integers of different types, you will need to [cast](#casting) one of them first. - -Comparison operations do not abort. 
- -| Syntax | Operation -|--------|----------- -| `<` | less than -| `>` | greater than -| `<=` | less than or equal to -| `>=` | greater than or equal to - -### Equality - -Like all types with [`drop`](./abilities.md) in Move, all integer types support the ["equal"](./equality.md) and ["not equal"](./equality.md) operations. Both arguments need to be of the same type. If you need to compare integers of different types, you will need to [cast](#casting) one of them first. - -Equality operations do not abort. - -| Syntax | Operation -|--------|---------- -| `==` | equal -| `!=` | not equal - -For more details see the section on [equality](./equality.md) - -## Casting - -Integer types of one size can be cast to integer types of another size. Integers are the only types in Move that support casting. - -Casts *do not* truncate. Casting will abort if the result is too large for the specified type - -| Syntax | Operation | Aborts if -|------------|---------------------------------------------------------------------------------|--------------------------------------- -| `(e as T)`| Cast integer expression `e` into an integer type `T` | `e` is too large to represent as a `T` - -Here, the type of `e` must be `8`, `16`, `32`, `64`, `128` or `256` and `T` must be `u8`, `u16`, `u32`, `u64`, `u128` or `u256`. - -For example: - -- `(x as u8)` -- `(y as u16)` -- `(873u16 as u32)` -- `(2u8 as u64)` -- `(1 + 3 as u128)` -- `(4/2 + 12345 as u256)` - -## Ownership - -As with the other scalar values built-in to the language, integer values are implicitly copyable, meaning they can be copied without an explicit instruction such as [`copy`](./variables.md#move-and-copy). 
diff --git a/developer-docs-site/docs/move/book/introduction.md b/developer-docs-site/docs/move/book/introduction.md deleted file mode 100644 index f4a93db31b531..0000000000000 --- a/developer-docs-site/docs/move/book/introduction.md +++ /dev/null @@ -1,15 +0,0 @@ -# Introduction - -Welcome to Move, a next generation language for secure, sandboxed, and formally verified programming. It has been used as the smart contract language for several blockchains including Aptos. Move allows developers to write programs that flexibly manage and transfer assets, while providing the security and protections against attacks on those assets. However, Move has been developed with use cases in mind outside a blockchain context as well. - -Move takes its cue from [Rust](https://www.rust-lang.org/) by using resource types with move (hence the name) semantics as an explicit representation of digital assets, such as currency. - -## Who is Aptos Move Book for? - -Move was designed and created as a secure, verified, yet flexible programming language. The first use of Move is for the implementation of the Diem blockchain, and it is currently being used on Aptos. - -This book is suitable for developers who are with some programming experience and who want to begin understanding the core programming language and see examples of its usage. - -## Where Do I Start? - -Begin with understanding [modules and scripts](./modules-and-scripts.md) and then work through the [Move Tutorial](./creating-coins.md). diff --git a/developer-docs-site/docs/move/book/loops.md b/developer-docs-site/docs/move/book/loops.md deleted file mode 100644 index 2dfac3fdd95a1..0000000000000 --- a/developer-docs-site/docs/move/book/loops.md +++ /dev/null @@ -1,184 +0,0 @@ -# While and Loop - -Move offers two constructs for looping: `while` and `loop`. - -## `while` loops - -The `while` construct repeats the body (an expression of type unit) until the condition (an expression of type `bool`) evaluates to `false`. 
- -Here is an example of simple `while` loop that computes the sum of the numbers from `1` to `n`: - -```move -fun sum(n: u64): u64 { - let sum = 0; - let i = 1; - while (i <= n) { - sum = sum + i; - i = i + 1 - }; - - sum -} -``` - -Infinite loops are allowed: - -```move= -fun foo() { - while (true) { } -} -``` - -### `break` - -The `break` expression can be used to exit a loop before the condition evaluates to `false`. For example, this loop uses `break` to find the smallest factor of `n` that's greater than 1: - -```move -fun smallest_factor(n: u64): u64 { - // assuming the input is not 0 or 1 - let i = 2; - while (i <= n) { - if (n % i == 0) break; - i = i + 1 - }; - - i -} -``` - -The `break` expression cannot be used outside of a loop. - -### `continue` - -The `continue` expression skips the rest of the loop and continues to the next iteration. This loop uses `continue` to compute the sum of `1, 2, ..., n`, except when the number is divisible by 10: - -```move -fun sum_intermediate(n: u64): u64 { - let sum = 0; - let i = 0; - while (i < n) { - i = i + 1; - if (i % 10 == 0) continue; - sum = sum + i; - }; - - sum -} -``` - -The `continue` expression cannot be used outside of a loop. - -### The type of `break` and `continue` - -`break` and `continue`, much like `return` and `abort`, can have any type. The following examples illustrate where this flexible typing can be helpful: - -```move -fun pop_smallest_while_not_equal( - v1: vector<u64>, - v2: vector<u64>, -): vector<u64> { - let result = vector::empty(); - while (!vector::is_empty(&v1) && !vector::is_empty(&v2)) { - let u1 = *vector::borrow(&v1, vector::length(&v1) - 1); - let u2 = *vector::borrow(&v2, vector::length(&v2) - 1); - let popped = - if (u1 < u2) vector::pop_back(&mut v1) - else if (u2 < u1) vector::pop_back(&mut v2) - else break; // Here, `break` has type `u64` - vector::push_back(&mut result, popped); - }; - - result -} -``` - -```move -fun pick( - indexes: vector<u64>, - v1: &vector<u64>, - v2: &vector<u64> -): vector<u64> { - let len1 = vector::length(v1); - let len2 = vector::length(v2); - let result = vector::empty(); - while (!vector::is_empty(&indexes)) { - let index = vector::pop_back(&mut indexes); - let chosen_vector = - if (index < len1) v1 - else if (index < len2) v2 - else continue; // Here, `continue` has type `&vector<u64>
` - vector::push_back(&mut result, *vector::borrow(chosen_vector, index)) - }; - - result -} -``` - -## The `loop` expression - -The `loop` expression repeats the loop body (an expression with type `()`) until it hits a `break` - -Without a `break`, the loop will continue forever - -```move -fun foo() { - let i = 0; - loop { i = i + 1 } -} -``` - -Here is an example that uses `loop` to write the `sum` function: - -```move -fun sum(n: u64): u64 { - let sum = 0; - let i = 0; - loop { - i = i + 1; - if (i > n) break; - sum = sum + i - }; - - sum -} -``` - -As you might expect, `continue` can also be used inside a `loop`. Here is `sum_intermediate` from above rewritten using `loop` instead of `while` - -```move -fun sum_intermediate(n: u64): u64 { - let sum = 0; - let i = 0; - loop { - i = i + 1; - if (i % 10 == 0) continue; - if (i > n) break; - sum = sum + i - }; - - sum -} -``` - -## The type of `while` and `loop` - -Move loops are typed expressions. A `while` expression always has type `()`. - -```move -let () = while (i < 10) { i = i + 1 }; -``` - -If a `loop` contains a `break`, the expression has type unit `()` - -```move -(loop { if (i < 10) i = i + 1 else break }: ()); -let () = loop { if (i < 10) i = i + 1 else break }; -``` - -If `loop` does not have a `break`, `loop` can have any type much like `return`, `abort`, `break`, and `continue`. - -```move -(loop (): u64); -(loop (): address); -(loop (): &vector>); -``` diff --git a/developer-docs-site/docs/move/book/modules-and-scripts.md b/developer-docs-site/docs/move/book/modules-and-scripts.md deleted file mode 100644 index 181ee743b9899..0000000000000 --- a/developer-docs-site/docs/move/book/modules-and-scripts.md +++ /dev/null @@ -1,136 +0,0 @@ -# Modules and Scripts - -Move has two different types of programs: ***Modules*** and ***Scripts***. Modules are libraries that define struct types along with functions that operate on these types. 
Struct types define the schema of Move's [global storage](./global-storage-structure.md), and module functions define the rules for updating storage. Modules themselves are also stored in global storage. Scripts are executable entrypoints similar to a `main` function in a conventional language. A script typically calls functions of a published module that perform updates to global storage. Scripts are ephemeral code snippets that are not published in global storage. - -A Move source file (or **compilation unit**) may contain multiple modules and scripts. However, publishing a module or executing a script are separate VM operations. - -## Syntax - -### Scripts - -:::tip Tutorial -To learn how to publish and execute a Move script, follow the [Move Scripts](../move-on-aptos/move-scripts.md) example. -::: - - -A script has the following structure: - -```text -script { - <use>* - <constants>* - fun <identifier><[type parameters: constraint]*>([identifier: type]*) <function_body> -} -``` - -A `script` block must start with all of its [`use`](./uses.md) declarations, followed by any [constants](./constants.md) and (finally) the main -[function](./functions.md) declaration. -The main function can have any name (i.e., it need not be called `main`), is the only function in a script block, can have any number of -arguments, and must not return a value. Here is an example with each of these components: - -```move -script { - // Import the debug module published at the named account address std. - use std::debug; - - const ONE: u64 = 1; - - fun main(x: u64) { - let sum = x + ONE; - debug::print(&sum) - } -} -``` - -Scripts have very limited power—they cannot declare friends, struct types or access global storage. Their primary purpose is to invoke module functions. - -### Modules - -A module has the following syntax: - -```text -module <address>::<identifier> { - (<use> | <friend> | <type> | <function> | <constant>)* -} -``` - -where `<address>
` is a valid [named or literal address](./address.md). - -For example: - -```move -module 0x42::example { - struct Example has copy, drop { i: u64 } - - use std::debug; - friend 0x42::another_example; - - const ONE: u64 = 1; - - public fun print(x: u64) { - let sum = x + ONE; - let example = Example { i: sum }; - debug::print(&sum) - } -} -``` - -The `module 0x42::example` part specifies that the module `example` will be published under the [account address](./address.md) `0x42` in [global storage](./global-storage-structure.md). - -Modules can also be declared using [named addresses](./address.md). For example: - -```move -module example_addr::example { - struct Example has copy, drop { a: address } - - use std::debug; - friend example_addr::another_example; - - public fun print() { - let example = Example { a: @example_addr }; - debug::print(&example) - } -} -``` - -Because named addresses only exist at the source language level and during compilation, -named addresses will be fully substituted for their value at the bytecode -level. For example if we had the following code: - -```move -script { - fun example() { - my_addr::m::foo(@my_addr); - } -} -``` - -and we compiled it with `my_addr` set to `0xC0FFEE`, then it would be equivalent -to the following operationally: - -```move -script { - fun example() { - 0xC0FFEE::m::foo(@0xC0FFEE); - } -} -``` - -However at the source level, these _are not equivalent_—the function -`m::foo` _must_ be accessed through the `my_addr` named address, and not through -the numerical value assigned to that address. - -Module names can start with letters `a` to `z` or letters `A` to `Z`. After the first character, module names can contain underscores `_`, letters `a` to `z`, letters `A` to `Z`, or digits `0` to `9`. - -```move -module my_module {} -module foo_bar_42 {} -``` - -Typically, module names start with an lowercase letter. A module named `my_module` should be stored in a source file named `my_module.move`. 
- -All elements inside a `module` block can appear in any order. -Fundamentally, a module is a collection of [`types`](./structs-and-resources.md) and [`functions`](./functions.md). -The [`use`](./uses.md) keyword is used to import types from other modules. -The [`friend`](./friends.md) keyword specifies a list of trusted modules. -The [`const`](./constants.md) keyword defines private constants that can be used in the functions of a module. diff --git a/developer-docs-site/docs/move/book/package-upgrades.md b/developer-docs-site/docs/move/book/package-upgrades.md deleted file mode 100644 index 3f8eb7f307c35..0000000000000 --- a/developer-docs-site/docs/move/book/package-upgrades.md +++ /dev/null @@ -1,121 +0,0 @@ -# Package Upgrades - -Move code (e.g., Move modules) on the Aptos blockchain can be upgraded. This -allows code owners and module developers to update and evolve their contracts -under a single, stable, well-known account address that doesn't change. If a -module upgrade happens, all consumers of that module will automatically receive -the latest version of the code (e.g., the next time they interact with it). - -The Aptos blockchain natively supports different _upgrade policies_, which allow -move developers to explicitly define the constraints around how their move code -can be upgraded. The default policy is _backwards compatible_. This means that -code upgrades are accepted only if they guarantee that no existing resource storage -or public APIs are broken by the upgrade (including public functions). -This compatibility checking is possible because of Move's strongly typed bytecode -semantics. - -We note, however, that even compatible upgrades can have hazardous effects on -applications and dependent Move code (for example, if the semantics of the underlying -module are modified). As a result, developers should be careful when depending on -third-party Move code that can be upgraded on-chain. 
See -[Security considerations for dependencies](#security-considerations-for-dependencies) -for more details. - -## How it works - -Move code upgrades on the Aptos blockchain happen at the [Move package](./packages.md) -granularity. A package specifies an upgrade policy in the `Move.toml` manifest: - -```toml -[package] -name = "MyApp" -version = "0.0.1" -upgrade_policy = "compatible" -... -``` -:::tip Compatibility check -Aptos checks compatibility at the time a Move package is published via an Aptos transaction. This transaction will abort if deemed incompatible. -::: - -## How to upgrade - -To upgrade already published Move code, simply attempt to republish the code at -the same address that it was previously published. This can be done by following the -instructions for code compilation and publishing using the -[Aptos CLI](../../tools/aptos-cli/use-cli/use-aptos-cli.md). For an example, -see the [Your First Move Module](../../tutorials/first-move-module.md) tutorial. - -## Upgrade policies - -There are two different upgrade policies currently supported by Aptos: - -- `compatible`: these upgrades must be backwards compatible, specifically: - - For storage, all old struct declarations must be the same in - the new code. This ensures that the existing state of storage is - correctly interpreted by the new code. However, new struct declarations - can be added. - - For APIs, all existing public functions must have the same signature as - before. New functions, including public and entry functions, can be added. -- `immutable`: the code is not upgradeable and is guaranteed to stay the same - forever. - -Those policies are ordered regarding strength such that `compatible < immutable`, -i.e., compatible is weaker than immutable. The policy of a package on-chain can -only get stronger, not weaker. Moreover, the policy of all dependencies of a -package must be stronger or equal to the policy of the given package. 
For example, -an `immutable` package cannot refer directly or indirectly to a `compatible` package. -This gives users the guarantee that no unexpected updates can happen under the hood. - -Note that there is one exception to the above rule: framework packages -installed at addresses `0x1` to `0xa` are exempted from the dependency check. -This is necessary so one can define an `immutable` package based on the standard -libraries, which have the `compatible` policy to allow critical upgrades and fixes. - -## Compatibility rules -When using `compatible` upgrade policy, a module package can be upgraded. However, updates to existing modules already -published previously need to be compatible and follow the rules below: -- All existing structs' fields cannot be updated. This means no new fields can be added and existing fields cannot be -modified. Struct abilities also cannot be changed (no new ones added or existing removed). -- All public and entry functions cannot change their signature (argument types, type argument, return types). However, -argument names can change. -- `public(friend)` functions are treated as private and thus their signature can arbitrarily change. This is safe as -only modules in the same package can call friend functions anyway and they need to be updated if the signature changes. - -When updating your modules, if you see an incompatible error, make sure to check the above rules and fix any violations. - -## Security considerations for dependencies - -As mentioned above, even compatible upgrades can have disastrous effects for -applications that depend on the upgraded code. These effects can come from bugs, -but they can also be the result of malicious upgrades. For example, -an upgraded dependency can suddenly make all functions abort, breaking the -operation of your Move code. Alternatively, an upgraded dependency can make -all functions suddenly cost much more gas to execute then before the upgrade. 
-As result, dependencies to upgradeable packages need to be handled with care: - -- The safest dependency is, of course, an `immutable` package. This guarantees - that the dependency will never change, including its transitive dependencies. - In order to update an immutable package, the owner would have to introduce a - new major version, which is practically like deploying a new, separate - and independent package. This is because major versioning can be expressed - only by name (e.g. `module feature_v1` and `module feature_v2`). However, - not all package owners like to publish their code as `immutable`, because this - takes away the ability to fix bugs and update the code in place. -- If you have a dependency to a `compatible` package, it is highly - recommended you know and understand the entity publishing the package. - The highest level of assurance is when the package is governed by a - Decentralized Autonomous Organization (DAO) where no single user can initiate - an upgrade; a vote or similar has to be taken. This is the case for the Aptos - framework. - -## Programmatic upgrade - -In general, Aptos offers, via the Move module `aptos_framework::code`, -ways to publish code from anywhere in your smart contracts. However, -notice that code published in the current transaction can be executed -only after that transaction ends. - -The Aptos framework itself, including all the on-chain administration logic, is -an example for programmatic upgrade. The framework is marked as `compatible`. -Upgrades happen via specific generated governance scripts. For more details, -see [Aptos Governance](../../concepts/governance.md). 
diff --git a/developer-docs-site/docs/move/book/packages.md b/developer-docs-site/docs/move/book/packages.md deleted file mode 100644 index 92248bc8b7e95..0000000000000 --- a/developer-docs-site/docs/move/book/packages.md +++ /dev/null @@ -1,371 +0,0 @@ -# Packages - -Packages allow Move programmers to more easily re-use code and share it -across projects. The Move package system allows programmers to easily: -* Define a package containing Move code; -* Parameterize a package by [named addresses](./address.md); -* Import and use packages in other Move code and instantiate named addresses; -* Build packages and generate associated compilation artifacts from packages; and -* Work with a common interface around compiled Move artifacts. - -## Package Layout and Manifest Syntax - -A Move package source directory contains a `Move.toml` package manifest -file along with a set of subdirectories: - -``` -a_move_package -├── Move.toml (required) -├── sources (required) -├── examples (optional, test & dev mode) -├── scripts (optional) -├── doc_templates (optional) -└── tests (optional, test mode) -``` - -The directories marked `required` _must_ be present in order for the directory -to be considered a Move package and to be compiled. Optional directories can -be present, and if so will be included in the compilation process. Depending on -the mode that the package is built with (`test` or `dev`), the `tests` and -`examples` directories will be included as well. - -The `sources` directory can contain both Move modules and Move scripts (both -transaction scripts and modules containing script functions). The `examples` -directory can hold additional code to be used only for development and/or -tutorial purposes that will not be included when compiled outside `test` or -`dev` mode. - -A `scripts` directory is supported so transaction scripts can be separated -from modules if that is desired by the package author. 
The `scripts` -directory will always be included for compilation if it is present. -Documentation will be built using any documentation templates present in -the `doc_templates` directory. - -### Move.toml - -The Move package manifest is defined within the `Move.toml` file and has the -following syntax. Optional fields are marked with `*`, `+` denotes -one or more elements: - -``` -[package] -name = # e.g., "MoveStdlib" -version = ".." # e.g., "0.1.1" -license* = # e.g., "MIT", "GPL", "Apache 2.0" -authors* = [] # e.g., ["Joe Smith (joesmith@noemail.com)", "Jane Smith (janesmith@noemail.com)"] - -[addresses] # (Optional section) Declares named addresses in this package and instantiates named addresses in the package graph -# One or more lines declaring named addresses in the following format - = "_" | "" # e.g., std = "_" or my_addr = "0xC0FFEECAFE" - -[dependencies] # (Optional section) Paths to dependencies and instantiations or renamings of named addresses from each dependency -# One or more lines declaring dependencies in the following format - = { local = , addr_subst* = { ( = ( | ""))+ } } # local dependencies - = { git = , subdir=, rev=, addr_subst* = { ( = ( | ""))+ } } # git dependencies - -[dev-addresses] # (Optional section) Same as [addresses] section, but only included in "dev" and "test" modes -# One or more lines declaring dev named addresses in the following format - = "_" | "" # e.g., std = "_" or my_addr = "0xC0FFEECAFE" - -[dev-dependencies] # (Optional section) Same as [dependencies] section, but only included in "dev" and "test" modes -# One or more lines declaring dev dependencies in the following format - = { local = , addr_subst* = { ( = ( |
))+ } } -``` - -An example of a minimal package manifest with one local dependency and one git dependency: - -``` -[package] -name = "AName" -version = "0.0.0" -``` - -An example of a more standard package manifest that also includes the Move -standard library and instantiates the named address `Std` from it with the -address value `0x1`: - -``` -[package] -name = "AName" -version = "0.0.0" -license = "Apache 2.0" - -[addresses] -address_to_be_filled_in = "_" -specified_address = "0xB0B" - -[dependencies] -# Local dependency -LocalDep = { local = "projects/move-awesomeness", addr_subst = { "std" = "0x1" } } -# Git dependency -MoveStdlib = { git = "https://github.com/diem/diem.git", subdir="language/move-stdlib", rev = "56ab033cc403b489e891424a629e76f643d4fb6b" } - -[dev-addresses] # For use when developing this module -address_to_be_filled_in = "0x101010101" -``` - -Most of the sections in the package manifest are self explanatory, but named -addresses can be a bit difficult to understand so it's worth examining them in -a bit more detail. - -## Named Addresses During Compilation - -Recall that Move has [named addresses](./address.md) and that -named addresses cannot be declared in Move. Because of this, until now -named addresses and their values needed to be passed to the compiler on the -command line. With the Move package system this is no longer needed, and -you can declare named addresses in the package, instantiate other named -addresses in scope, and rename named addresses from other packages within -the Move package system manifest file. Let's go through each of these -individually: - -### Declaration - -Let's say we have a Move module in `example_pkg/sources/A.move` as follows: - -```move -module named_addr::A { - public fun x(): address { @named_addr } -} -``` - -We could in `example_pkg/Move.toml` declare the named address `named_addr` in -two different ways. The first: - -``` -[package] -name = "ExamplePkg" -... 
-[addresses] -named_addr = "_" -``` - -Declares `named_addr` as a named address in the package `ExamplePkg` and -that _this address can be any valid address value_. Therefore an importing -package can pick the value of the named address `named_addr` to be any address -it wishes. Intuitively you can think of this as parameterizing the package -`ExamplePkg` by the named address `named_addr`, and the package can then be -instantiated later on by an importing package. - -`named_addr` can also be declared as: - -``` -[package] -name = "ExamplePkg" -... -[addresses] -named_addr = "0xCAFE" -``` - -which states that the named address `named_addr` is exactly `0xCAFE` and cannot be -changed. This is useful so other importing packages can use this named -address without needing to worry about the exact value assigned to it. - -With these two different declaration methods, there are two ways that -information about named addresses can flow in the package graph: -* The former ("unassigned named addresses") allows named address values to flow - from the importation site to the declaration site. -* The latter ("assigned named addresses") allows named address values to flow - from the declaration site upwards in the package graph to usage sites. - -With these two methods for flowing named address information throughout the -package graph the rules around scoping and renaming become important to -understand. - -## Scoping and Renaming of Named Addresses - -A named address `N` in a package `P` is in scope if: -1. It declares a named address `N`; or -2. A package in one of `P`'s transitive dependencies declares the named address - `N` and there is a dependency path in the package graph between `P` and the - declaring package of `N` with no renaming of `N`. - -Additionally, every named address in a package is exported. 
Because of this and -the above scoping rules each package can be viewed as coming with a set of -named addresses that will be brought into scope when the package is imported, -e.g., if the `ExamplePkg` package was imported, that importation would bring -into scope the `named_addr` named address. Because of this, if `P` imports two -packages `P1` and `P2` both of which declare a named address `N` an issue -arises in `P`: which "`N`" is meant when `N` is referred to in `P`? The one -from `P1` or `P2`? To prevent this ambiguity around which package a named -address is coming from, we enforce that the sets of scopes introduced by all -dependencies in a package are disjoint, and provide a way to _rename named -addresses_ when the package that brings them into scope is imported. - -Renaming a named address when importing can be done as follows in our `P`, -`P1`, and `P2` example above: - -``` -[package] -name = "P" -... -[dependencies] -P1 = { local = "some_path_to_P1", addr_subst = { "P1N" = "N" } } -P2 = { local = "some_path_to_P2" } -``` - -With this renaming `N` refers to the `N` from `P2` and `P1N` will refer to `N` -coming from `P1`: - -``` -module N::A { - public fun x(): address { @P1N } -} -``` - -It is important to note that _renaming is not local_: once a named address `N` -has been renamed to `N2` in a package `P` all packages that import `P` will not -see `N` but only `N2` unless `N` is reintroduced from outside of `P`. This is -why rule (2) in the scoping rules at the start of this section specifies a -"dependency path in the package graph between `P` and the declaring -package of `N` with no renaming of `N`." - -### Instantiation - -Named addresses can be instantiated multiple times across the package graph as -long as it is always with the same value. It is an error if the same named -address (regardless of renaming) is instantiated with differing values across -the package graph. 
- -A Move package can only be compiled if all named addresses resolve to a value. -This presents issues if the package wishes to expose an uninstantiated named -address. This is what the `[dev-addresses]` section solves. This section can -set values for named addresses, but cannot introduce any named addresses. -Additionally, only the `[dev-addresses]` in the root package are included in -`dev` mode. For example a root package with the following manifest would not compile -outside of `dev` mode since `named_addr` would be uninstantiated: - -``` -[package] -name = "ExamplePkg" -... -[addresses] -named_addr = "_" - -[dev-addresses] -named_addr = "0xC0FFEE" -``` - -## Usage, Artifacts, and Data Structures - -The Move package system comes with a command line option as part of the Move -CLI `move `. Unless a -particular path is provided, all package commands will run in the current working -directory. The full list of commands and flags for the Move CLI can be found by -running `move --help`. - -### Usage - -A package can be compiled either through the Move CLI commands, or as a library -command in Rust with the function `compile_package`. This will create a -`CompiledPackage` that holds the compiled bytecode along with other compilation -artifacts (source maps, documentation, ABIs) in memory. This `CompiledPackage` -can be converted to an `OnDiskPackage` and vice versa -- the latter being the data of -the `CompiledPackage` laid out in the file system in the following format: - -``` -a_move_package -├── Move.toml -... -└── build - ├── - │   ├── BuildInfo.yaml - │   ├── bytecode_modules - │   │   └── *.mv - │   ├── source_maps - │   │   └── *.mvsm - │ ├── bytecode_scripts - │ │   └── *.mv - │ ├── abis - │ │   ├── *.abi - │ │   └── /*.abi - │   └── sources - │   └── *.move - ... - └── - ├── BuildInfo.yaml - ... - └── sources -``` - -See the `move-package` crate for more information on these data structures and -how to use the Move package system as a Rust library. 
- -## Using Bytecode for Dependencies - -Move bytecode can be used as dependencies when the Move source code for those dependencies are not available locally. To use this feature, you will need co-locate the files in directories at the same level and then specify their paths in the corresponding `Move.toml` files. - -## Requirements and limitations - -Using local bytecode as dependencies requires bytecode files to be downloaded locally, and the actual address for each named address must be specified in either `Move.toml` or through `--named-addresses`. - -Note, both `aptos move prove` and `aptos move test` commands, currently, do not support bytecode as dependencies. - -## Recommended structure - -We use an example to illustrate the dev flow of using this feature. Suppose we want to compile the package `A`. The package layout is: -```rust -./A -├── Move.toml -├── sources - ├ AModule.move -``` - -`A.move` is defined below, depending on the modules `Bar` and `Foo`: - -```rust -module A::AModule { - use B::Bar; - use C::Foo; - public fun foo(): u64 { - Bar::foo() + Foo::bar() - } -} -``` - -Suppose the source of `Bar` and `Foo` are not available but the corresponding bytecode `Bar.mv` and `Foo.mv` are available locally. To use them as dependencies, we would: - -Specify `Move.toml` for `Bar` and `Foo`. Note that named addresses are already instantiated with the actual address in the bytecode. In our example, the actual address for `C` is already bound to `0x3`. As a result, `[addresses]` must be specified `C` as `0x3`, as shown below: - -```rust -[package] -name = "Foo" -version = "0.0.0" -[addresses] -C = "0x3" -``` - -Place the bytecode file and the corresponding `Move.toml` file in the same directory with the bytecode in a `build` subdirectory. Note an empty `sources` directory is **required**. 
For instance, the layout of the folder `B` (for the package `Bar`) and `C` (for the package `Foo`) would resemble: - -```rust -./B -├── Move.toml -├── sources -├── build - ├ Bar.mv -``` - -```rust -./C -├── Move.toml -├── sources -├── build - ├── Foo - ├──bytecode_modules - ├ Foo.mv -``` - -Specify `[dependencies]` in the `Move.toml` of the target (first) package with the location of the dependent (secondary) packages. For instance, assuming all three package directories are at the same level, `Move.toml` of `A` would resemble: - -```rust -[package] -name = "A" -version = "0.0.0" -[addresses] -A = "0x2" -[dependencies] -Bar = { local = "../B" } -Foo = { local = "../C" } -``` - -Note that if both the bytecode and the source code of the same package exist in the search paths, the compiler will complain that the declaration is duplicated. diff --git a/developer-docs-site/docs/move/book/references.md b/developer-docs-site/docs/move/book/references.md deleted file mode 100644 index 57a5b99e1f912..0000000000000 --- a/developer-docs-site/docs/move/book/references.md +++ /dev/null @@ -1,235 +0,0 @@ -# References - -Move has two types of references: immutable `&` and mutable `&mut`. Immutable references are read -only, and cannot modify the underlying value (or any of its fields). Mutable references allow for -modifications via a write through that reference. Move's type system enforces an ownership -discipline that prevents reference errors. - -For more details on the rules of references, see [Structs and Resources](./structs-and-resources.md) - -## Reference Operators - -Move provides operators for creating and extending references as well as converting a mutable -reference to an immutable one. Here and elsewhere, we use the notation `e: T` for "expression `e` -has type `T`". 
- -| Syntax | Type | Description | -| ----------- | ----------------------------------------------------- | -------------------------------------------------------------- | -| `&e` | `&T` where `e: T` and `T` is a non-reference type | Create an immutable reference to `e` | -| `&mut e` | `&mut T` where `e: T` and `T` is a non-reference type | Create a mutable reference to `e`. | -| `&e.f` | `&T` where `e.f: T` | Create an immutable reference to field `f` of struct `e`. | -| `&mut e.f` | `&mut T` where `e.f: T` | Create a mutable reference to field `f` of struct`e`. | -| `freeze(e)` | `&T` where `e: &mut T` | Convert the mutable reference `e` into an immutable reference. | - -The `&e.f` and `&mut e.f` operators can be used both to create a new reference into a struct or to -extend an existing reference: - -```move -let s = S { f: 10 }; -let f_ref1: &u64 = &s.f; // works -let s_ref: &S = &s; -let f_ref2: &u64 = &s_ref.f // also works -``` - -A reference expression with multiple fields works as long as both structs are in the same module: - -```move -struct A { b: B } -struct B { c : u64 } -fun f(a: &A): &u64 { - &a.b.c -} -``` - -Finally, note that references to references are not allowed: - -```move -let x = 7; -let y: &u64 = &x; -let z: &&u64 = &y; // will not compile -``` - -## Reading and Writing Through References - -Both mutable and immutable references can be read to produce a copy of the referenced value. - -Only mutable references can be written. A write `*x = v` discards the value previously stored in `x` -and updates it with `v`. - -Both operations use the C-like `*` syntax. However, note that a read is an expression, whereas a -write is a mutation that must occur on the left hand side of an equals. 
- -| Syntax | Type | Description | -| ---------- | ----------------------------------- | ----------------------------------- | -| `*e` | `T` where `e` is `&T` or `&mut T` | Read the value pointed to by `e` | -| `*e1 = e2` | `()` where `e1: &mut T` and `e2: T` | Update the value in `e1` with `e2`. | - -In order for a reference to be read, the underlying type must have the -[`copy` ability](./abilities.md) as reading the reference creates a new copy of the value. This rule -prevents the copying of resource values: - -```move= -fun copy_resource_via_ref_bad(c: Coin) { - let c_ref = &c; - let counterfeit: Coin = *c_ref; // not allowed! - pay(c); - pay(counterfeit); -} -``` - -Dually: in order for a reference to be written to, the underlying type must have the -[`drop` ability](./abilities.md) as writing to the reference will discard (or "drop") the old value. -This rule prevents the destruction of resource values: - -```move= -fun destroy_resource_via_ref_bad(ten_coins: Coin, c: Coin) { - let ref = &mut ten_coins; - *ref = c; // not allowed--would destroy 10 coins! -} -``` - -## `freeze` inference - -A mutable reference can be used in a context where an immutable reference is expected: - -```move -let x = 7; -let y: &u64 = &mut x; -``` - -This works because the under the hood, the compiler inserts `freeze` instructions where they are -needed. 
Here are a few more examples of `freeze` inference in action: - -```move= -fun takes_immut_returns_immut(x: &u64): &u64 { x } - -// freeze inference on return value -fun takes_mut_returns_immut(x: &mut u64): &u64 { x } - -fun expression_examples() { - let x = 0; - let y = 0; - takes_immut_returns_immut(&x); // no inference - takes_immut_returns_immut(&mut x); // inferred freeze(&mut x) - takes_mut_returns_immut(&mut x); // no inference - - assert!(&x == &mut y, 42); // inferred freeze(&mut y) -} - -fun assignment_examples() { - let x = 0; - let y = 0; - let imm_ref: &u64 = &x; - - imm_ref = &x; // no inference - imm_ref = &mut y; // inferred freeze(&mut y) -} -``` - -### Subtyping - -With this `freeze` inference, the Move type checker can view `&mut T` as a subtype of `&T`. As shown -above, this means that anywhere for any expression where a `&T` value is used, a `&mut T` value can -also be used. This terminology is used in error messages to concisely indicate that a `&mut T` was -needed where a `&T` was supplied. For example - -```move= -address 0x42 { -module example { - fun read_and_assign(store: &mut u64, new_value: &u64) { - *store = *new_value - } - - fun subtype_examples() { - let x: &u64 = &0; - let y: &mut u64 = &mut 1; - - x = &mut 1; // valid - y = &2; // invalid! - - read_and_assign(y, x); // valid - read_and_assign(x, y); // invalid! - } -} -} -``` - -will yield the following error messages - -```text -error: - - ┌── example.move:12:9 ─── - │ - 12 │ y = &2; // invalid! - │ ^ Invalid assignment to local 'y' - · - 12 │ y = &2; // invalid! - │ -- The type: '&{integer}' - · - 9 │ let y: &mut u64 = &mut 1; - │ -------- Is not a subtype of: '&mut u64' - │ - -error: - - ┌── example.move:15:9 ─── - │ - 15 │ read_and_assign(x, y); // invalid! - │ ^^^^^^^^^^^^^^^^^^^^^ Invalid call of '0x42::example::read_and_assign'. 
Invalid argument for parameter 'store' - · - 8 │ let x: &u64 = &0; - │ ---- The type: '&u64' - · - 3 │ fun read_and_assign(store: &mut u64, new_value: &u64) { - │ -------- Is not a subtype of: '&mut u64' - │ -``` - -The only other types currently that has subtyping are [tuples](./tuples.md) - -## Ownership - -Both mutable and immutable references can always be copied and extended _even if there are existing -copies or extensions of the same reference_: - -```move -fun reference_copies(s: &mut S) { - let s_copy1 = s; // ok - let s_extension = &mut s.f; // also ok - let s_copy2 = s; // still ok - ... -} -``` - -This might be surprising for programmers familiar with Rust's ownership system, which would reject -the code above. Move's type system is more permissive in its treatment of -[copies](./variables.md#move-and-copy), but equally strict in ensuring unique ownership of mutable -references before writes. - -### References Cannot Be Stored - -References and tuples are the _only_ types that cannot be stored as a field value of structs, which -also means that they cannot exist in global storage. All references created during program execution -will be destroyed when a Move program terminates; they are entirely ephemeral. This invariant is -also true for values of types without the `store` [ability](./abilities.md), but note that -references and tuples go a step further by never being allowed in structs in the first place. - -This is another difference between Move and Rust, which allows references to be stored inside of -structs. - -Currently, Move cannot support this because references cannot be -[serialized](https://en.wikipedia.org/wiki/Serialization), but _every Move value must be -serializable_. This requirement comes from Move's -[persistent global storage](./global-storage-structure.md), which needs to serialize values to -persist them across program executions. Structs can be written to global storage, and thus they must -be serializable. 
- -One could imagine a fancier, more expressive, type system that would allow references to be stored -in structs _and_ ban those structs from existing in global storage. We could perhaps allow -references inside of structs that do not have the `store` [ability](./abilities.md), but that would -not completely solve the problem: Move has a fairly complex system for tracking static reference -safety, and this aspect of the type system would also have to be extended to support storing -references inside of structs. In short, Move's type system (particularly the aspects around -reference safety) would have to expand to support stored references. But it is something we are -keeping an eye on as the language evolves. diff --git a/developer-docs-site/docs/move/book/signer.md b/developer-docs-site/docs/move/book/signer.md deleted file mode 100644 index 820963c8b249a..0000000000000 --- a/developer-docs-site/docs/move/book/signer.md +++ /dev/null @@ -1,73 +0,0 @@ -# Signer - -`signer` is a built-in Move resource type. A `signer` is a -[capability](https://en.wikipedia.org/wiki/Object-capability_model) that allows the holder to act on -behalf of a particular `address`. You can think of the native implementation as being: - -```move -struct signer has drop { a: address } -``` - -A `signer` is somewhat similar to a Unix [UID](https://en.wikipedia.org/wiki/User_identifier) in -that it represents a user authenticated by code _outside_ of Move (e.g., by checking a cryptographic -signature or password). - -## Comparison to `address` - -A Move program can create any `address` value without special permission using address literals: - -```move -let a1 = @0x1; -let a2 = @0x2; -// ... and so on for every other possible address -``` - -However, `signer` values are special because they cannot be created via literals or -instructions--only by the Move VM. 
Before the VM runs a script with parameters of type `signer`, it -will automatically create `signer` values and pass them into the script: - -```move -script { - use std::signer; - fun main(s: signer) { - assert!(signer::address_of(&s) == @0x42, 0); - } -} -``` - -This script will abort with code `0` if the script is sent from any address other than `0x42`. - -A transaction script can have an arbitrary number of `signer`s as long as the `signer`s are a prefix -to any other arguments. In other words, all of the `signer` arguments must come first: - -```move -script { - use std::signer; - fun main(s1: signer, s2: signer, x: u64, y: u8) { - // ... - } -} -``` - -This is useful for implementing _multi-signer scripts_ that atomically act with the authority of -multiple parties. For example, an extension of the script above could perform an atomic currency -swap between `s1` and `s2`. - -## `signer` Operators - -The `std::signer` standard library module provides two utility functions over `signer` values: - -| Function | Description | -| ------------------------------------------- | ------------------------------------------------------------- | -| `signer::address_of(&signer): address` | Return the `address` wrapped by this `&signer`. | -| `signer::borrow_address(&signer): &address` | Return a reference to the `address` wrapped by this `&signer`. | - -In addition, the `move_to(&signer, T)` [global storage operator](./global-storage-operators.md) -requires a `&signer` argument to publish a resource `T` under `signer.address`'s account. This -ensures that only an authenticated user can elect to publish a resource under their `address`. - -## Ownership - -Unlike simple scalar values, `signer` values are not copyable, meaning they cannot be copied (from -any operation whether it be through an explicit [`copy`](./variables.md#move-and-copy) instruction -or through a [dereference `*`](./references.md#reading-and-writing-through-references). 
diff --git a/developer-docs-site/docs/move/book/standard-library.md b/developer-docs-site/docs/move/book/standard-library.md deleted file mode 100644 index 6307159c00688..0000000000000 --- a/developer-docs-site/docs/move/book/standard-library.md +++ /dev/null @@ -1,3 +0,0 @@ -# Libraries - -Aptos provides multiple useful libraries for developers. The complete up-to-date docs can be found [here](../../reference/move.md). diff --git a/developer-docs-site/docs/move/book/structs-and-resources.md b/developer-docs-site/docs/move/book/structs-and-resources.md deleted file mode 100644 index 5ab254af015ac..0000000000000 --- a/developer-docs-site/docs/move/book/structs-and-resources.md +++ /dev/null @@ -1,527 +0,0 @@ -# Structs and Resources - -A _struct_ is a user-defined data structure containing typed fields. Structs can store any -non-reference type, including other structs. - -We often refer to struct values as _resources_ if they cannot be copied and cannot be dropped. In -this case, resource values must have ownership transferred by the end of the function. This property -makes resources particularly well served for defining global storage schemas or for representing -important values (such as a token). - -By default, structs are linear and ephemeral. By this we mean that they: cannot be copied, cannot be -dropped, and cannot be stored in global storage. This means that all values have to have ownership -transferred (linear) and the values must be dealt with by the end of the program's execution -(ephemeral). We can relax this behavior by giving the struct [abilities](./abilities.md) which allow -values to be copied or dropped and also to be stored in global storage or to define global storage -schemas. 
- -## Defining Structs - -Structs must be defined inside a module: - -```move -address 0x2 { -module m { - struct Foo { x: u64, y: bool } - struct Bar {} - struct Baz { foo: Foo, } - // ^ note: it is fine to have a trailing comma -} -} -``` - -Structs cannot be recursive, so the following definition is invalid: - -```move -struct Foo { x: Foo } -// ^ error! Foo cannot contain Foo -``` - -As mentioned above: by default, a struct declaration is linear and ephemeral. So to allow the value -to be used with certain operations (that copy it, drop it, store it in global storage, or use it as -a storage schema), structs can be granted [abilities](./abilities.md) by annotating them with -`has `: - -```move -address 0x2 { -module m { - struct Foo has copy, drop { x: u64, y: bool } -} -} -``` - -For more details, see the [annotating structs](./abilities.md#annotating-structs) section. - -### Naming - -Structs must start with a capital letter `A` to `Z`. After the first letter, struct names can -contain underscores `_`, letters `a` to `z`, letters `A` to `Z`, or digits `0` to `9`. - -```move -struct Foo {} -struct BAR {} -struct B_a_z_4_2 {} -``` - -This naming restriction of starting with `A` to `Z` is in place to give room for future language -features. It may or may not be removed later. - -## Using Structs - -### Creating Structs - -Values of a struct type can be created (or "packed") by indicating the struct name, followed by -value for each field: - -```move -address 0x2 { -module m { - struct Foo has drop { x: u64, y: bool } - struct Baz has drop { foo: Foo } - - fun example() { - let foo = Foo { x: 0, y: false }; - let baz = Baz { foo: foo }; - } -} -} -``` - -If you initialize a struct field with a local variable whose name is the same as the field, you can -use the following shorthand: - -```move -let baz = Baz { foo: foo }; -// is equivalent to -let baz = Baz { foo }; -``` - -This is called sometimes called "field name punning". 
- -### Destroying Structs via Pattern Matching - -Struct values can be destroyed by binding or assigning them patterns. - -```move -address 0x2 { -module m { - struct Foo { x: u64, y: bool } - struct Bar { foo: Foo } - struct Baz {} - - fun example_destroy_foo() { - let foo = Foo { x: 3, y: false }; - let Foo { x, y: foo_y } = foo; - // ^ shorthand for `x: x` - - // two new bindings - // x: u64 = 3 - // foo_y: bool = false - } - - fun example_destroy_foo_wildcard() { - let foo = Foo { x: 3, y: false }; - let Foo { x, y: _ } = foo; - - // only one new binding since y was bound to a wildcard - // x: u64 = 3 - } - - fun example_destroy_foo_assignment() { - let x: u64; - let y: bool; - Foo { x, y } = Foo { x: 3, y: false }; - - // mutating existing variables x & y - // x = 3, y = false - } - - fun example_foo_ref() { - let foo = Foo { x: 3, y: false }; - let Foo { x, y } = &foo; - - // two new bindings - // x: &u64 - // y: &bool - } - - fun example_foo_ref_mut() { - let foo = Foo { x: 3, y: false }; - let Foo { x, y } = &mut foo; - - // two new bindings - // x: &mut u64 - // y: &mut bool - } - - fun example_destroy_bar() { - let bar = Bar { foo: Foo { x: 3, y: false } }; - let Bar { foo: Foo { x, y } } = bar; - // ^ nested pattern - - // two new bindings - // x: u64 = 3 - // y: bool = false - } - - fun example_destroy_baz() { - let baz = Baz {}; - let Baz {} = baz; - } -} -} -``` - -### Borrowing Structs and Fields - -The `&` and `&mut` operator can be used to create references to structs or fields. These examples -include some optional type annotations (e.g., `: &Foo`) to demonstrate the type of operations. 
- -```move -let foo = Foo { x: 3, y: true }; -let foo_ref: &Foo = &foo; -let y: bool = foo_ref.y; // reading a field via a reference to the struct -let x_ref: &u64 = &foo.x; - -let x_ref_mut: &mut u64 = &mut foo.x; -*x_ref_mut = 42; // modifying a field via a mutable reference -``` - -It is possible to borrow inner fields of nested structs: - -```move -let foo = Foo { x: 3, y: true }; -let bar = Bar { foo }; - -let x_ref = &bar.foo.x; -``` - -You can also borrow a field via a reference to a struct: - -```move -let foo = Foo { x: 3, y: true }; -let foo_ref = &foo; -let x_ref = &foo_ref.x; -// this has the same effect as let x_ref = &foo.x -``` - -### Reading and Writing Fields - -If you need to read and copy a field's value, you can then dereference the borrowed field: - -```move -let foo = Foo { x: 3, y: true }; -let bar = Bar { foo: copy foo }; -let x: u64 = *&foo.x; -let y: bool = *&foo.y; -let foo2: Foo = *&bar.foo; -``` - -If the field is implicitly copyable, the dot operator can be used to read fields of a struct without -any borrowing. (Only scalar values with the `copy` ability are implicitly copyable.) - -```move -let foo = Foo { x: 3, y: true }; -let x = foo.x; // x == 3 -let y = foo.y; // y == true -``` - -Dot operators can be chained to access nested fields: - -```move -let baz = Baz { foo: Foo { x: 3, y: true } }; -let x = baz.foo.x; // x = 3; -``` - -However, this is not permitted for fields that contain non-primitive types, such a vector or another -struct: - -```move -let foo = Foo { x: 3, y: true }; -let bar = Bar { foo }; -let foo2: Foo = *&bar.foo; -let foo3: Foo = bar.foo; // error! must add an explicit copy with *& -``` - -The reason behind this design decision is that copying a vector or another struct might be an -expensive operation. It is important for a programmer to be aware of this copy and make others aware -with the explicit syntax `*&`. 
- -In addition reading from fields, the dot syntax can be used to modify fields, regardless of the -field being a primitive type or some other struct. - -```move -let foo = Foo { x: 3, y: true }; -foo.x = 42; // foo = Foo { x: 42, y: true } -foo.y = !foo.y; // foo = Foo { x: 42, y: false } -let bar = Bar { foo }; // bar = Bar { foo: Foo { x: 42, y: false } } -bar.foo.x = 52; // bar = Bar { foo: Foo { x: 52, y: false } } -bar.foo = Foo { x: 62, y: true }; // bar = Bar { foo: Foo { x: 62, y: true } } -``` - -The dot syntax also works via a reference to a struct: - -```move -let foo = Foo { x: 3, y: true }; -let foo_ref = &mut foo; -foo_ref.x = foo_ref.x + 1; -``` - -## Privileged Struct Operations - -Most struct operations on a struct type `T` can only be performed inside the module that declares -`T`: - -- Struct types can only be created ("packed"), destroyed ("unpacked") inside the module that defines - the struct. -- The fields of a struct are only accessible inside the module that defines the struct. - -Following these rules, if you want to modify your struct outside the module, you will need to -provide public APIs for them. The end of the chapter contains some examples of this. - -However, struct _types_ are always visible to another module or script: - -```move -// m.move -address 0x2 { -module m { - struct Foo has drop { x: u64 } - - public fun new_foo(): Foo { - Foo { x: 42 } - } -} -} -``` - -```move -// n.move -address 0x2 { -module n { - use 0x2::m; - - struct Wrapper has drop { - foo: m::Foo - } - - fun f1(foo: m::Foo) { - let x = foo.x; - // ^ error! cannot access fields of `foo` here - } - - fun f2() { - let foo_wrapper = Wrapper { foo: m::new_foo() }; - } -} -} -``` - -Note that structs do not have visibility modifiers (e.g., `public` or `private`). - -## Ownership - -As mentioned above in [Defining Structs](#defining-structs), structs are by default linear and -ephemeral. This means they cannot be copied or dropped. 
This property can be very useful when -modeling real world resources like money, as you do not want money to be duplicated or get lost in -circulation. - -```move -address 0x2 { -module m { - struct Foo { x: u64 } - - public fun copying_resource() { - let foo = Foo { x: 100 }; - let foo_copy = copy foo; // error! 'copy'-ing requires the 'copy' ability - let foo_ref = &foo; - let another_copy = *foo_ref // error! dereference requires the 'copy' ability - } - - public fun destroying_resource1() { - let foo = Foo { x: 100 }; - - // error! when the function returns, foo still contains a value. - // This destruction requires the 'drop' ability - } - - public fun destroying_resource2(f: &mut Foo) { - *f = Foo { x: 100 } // error! - // destroying the old value via a write requires the 'drop' ability - } -} -} -``` - -To fix the second example (`fun destroying_resource1`), you would need to manually "unpack" the -resource: - -```move -address 0x2 { -module m { - struct Foo { x: u64 } - - public fun destroying_resource1_fixed() { - let foo = Foo { x: 100 }; - let Foo { x: _ } = foo; - } -} -} -``` - -Recall that you are only able to deconstruct a resource within the module in which it is defined. -This can be leveraged to enforce certain invariants in a system, for example, conservation of money. 
- -If on the other hand, your struct does not represent something valuable, you can add the abilities -`copy` and `drop` to get a struct value that might feel more familiar from other programming -languages: - -```move -address 0x2 { -module m { - struct Foo has copy, drop { x: u64 } - - public fun run() { - let foo = Foo { x: 100 }; - let foo_copy = copy foo; - // ^ this code copies foo, whereas `let x = foo` or - // `let x = move foo` both move foo - - let x = foo.x; // x = 100 - let x_copy = foo_copy.x; // x = 100 - - // both foo and foo_copy are implicitly discarded when the function returns - } -} -} -``` - -## Storing Resources in Global Storage - -Only structs with the `key` ability can be saved directly in -[persistent global storage](./global-storage-operators.md). All values stored within those `key` -structs must have the `store` ability. See the [ability](./abilities) and -[global storage](./global-storage-operators.md) chapters for more detail. - -## Examples - -Here are two short examples of how you might use structs to represent valuable data (in the case of -`Coin`) or more classical data (in the case of `Point` and `Circle`). - -### Example 1: Coin - - - -```move -address 0x2 { -module m { - // We do not want the Coin to be copied because that would be duplicating this "money", - // so we do not give the struct the 'copy' ability. - // Similarly, we do not want programmers to destroy coins, so we do not give the struct the - // 'drop' ability. - // However, we *want* users of the modules to be able to store this coin in persistent global - // storage, so we grant the struct the 'store' ability. This struct will only be inside of - // other resources inside of global storage, so we do not give the struct the 'key' ability. 
- struct Coin has store { - value: u64, - } - - public fun mint(value: u64): Coin { - // You would want to gate this function with some form of access control to prevent - // anyone using this module from minting an infinite amount of coins. - Coin { value } - } - - public fun withdraw(coin: &mut Coin, amount: u64): Coin { - assert!(coin.balance >= amount, 1000); - coin.value = coin.value - amount; - Coin { value: amount } - } - - public fun deposit(coin: &mut Coin, other: Coin) { - let Coin { value } = other; - coin.value = coin.value + value; - } - - public fun split(coin: Coin, amount: u64): (Coin, Coin) { - let other = withdraw(&mut coin, amount); - (coin, other) - } - - public fun merge(coin1: Coin, coin2: Coin): Coin { - deposit(&mut coin1, coin2); - coin1 - } - - public fun destroy_zero(coin: Coin) { - let Coin { value } = coin; - assert!(value == 0, 1001); - } -} -} -``` - -### Example 2: Geometry - -```move -address 0x2 { -module point { - struct Point has copy, drop, store { - x: u64, - y: u64, - } - - public fun new(x: u64, y: u64): Point { - Point { - x, y - } - } - - public fun x(p: &Point): u64 { - p.x - } - - public fun y(p: &Point): u64 { - p.y - } - - fun abs_sub(a: u64, b: u64): u64 { - if (a < b) { - b - a - } - else { - a - b - } - } - - public fun dist_squared(p1: &Point, p2: &Point): u64 { - let dx = abs_sub(p1.x, p2.x); - let dy = abs_sub(p1.y, p2.y); - dx*dx + dy*dy - } -} -} -``` - -```move -address 0x2 { -module circle { - use 0x2::point::{Self, Point}; - - struct Circle has copy, drop, store { - center: Point, - radius: u64, - } - - public fun new(center: Point, radius: u64): Circle { - Circle { center, radius } - } - - public fun overlaps(c1: &Circle, c2: &Circle): bool { - let dist_squared_value = point::dist_squared(&c1.center, &c2.center); - let r1 = c1.radius; - let r2 = c2.radius; - dist_squared_value <= r1*r1 + 2*r1*r2 + r2*r2 - } -} -} -``` diff --git a/developer-docs-site/docs/move/book/tuples.md 
b/developer-docs-site/docs/move/book/tuples.md deleted file mode 100644 index e0f8d85e2b4a1..0000000000000 --- a/developer-docs-site/docs/move/book/tuples.md +++ /dev/null @@ -1,137 +0,0 @@ -# Tuples and Unit - -Move does not fully support tuples as one might expect coming from another language with them as a -[first-class value](https://en.wikipedia.org/wiki/First-class_citizen). However, in order to support multiple return values, Move has tuple-like -expressions. These expressions do not result in a concrete value at runtime (there are no tuples in -the bytecode), and as a result they are very limited: they can only appear in expressions (usually -in the return position for a function); they cannot be bound to local variables; they cannot be -stored in structs; and tuple types cannot be used to instantiate generics. - -Similarly, [unit `()`](https://en.wikipedia.org/wiki/Unit_type) is a type created by the Move source language in order to be expression based. -The unit value `()` does not result in any runtime value. We can consider unit`()` to be an empty -tuple, and any restrictions that apply to tuples also apply to unit. - -It might feel weird to have tuples in the language at all given these restrictions. But one of the -most common use cases for tuples in other languages is for functions to allow functions to return -multiple values. Some languages work around this by forcing the users to write structs that contain -the multiple return values. However in Move, you cannot put references inside of -[structs](./structs-and-resources.md). This required Move to support multiple return values. These -multiple return values are all pushed on the stack at the bytecode level. At the source level, these -multiple return values are represented using tuples. - -## Literals - -Tuples are created by a comma separated list of expressions inside of parentheses. 
- -| Syntax | Type | Description | -| --------------- | ---------------------------------------------------------------------------- | ------------------------------------------------------------ | -| `()` | `(): ()` | Unit, the empty tuple, or the tuple of arity 0 | -| `(e1, ..., en)` | `(e1, ..., en): (T1, ..., Tn)` where `e_i: Ti` s.t. `0 < i <= n` and `n > 0` | A `n`-tuple, a tuple of arity `n`, a tuple with `n` elements | - -Note that `(e)` does not have type `(e): (t)`, in other words there is no tuple with one element. If -there is only a single element inside of the parentheses, the parentheses are only used for -disambiguation and do not carry any other special meaning. - -Sometimes, tuples with two elements are called "pairs" and tuples with three elements are called -"triples." - -### Examples - -```move -address 0x42 { -module example { - // all 3 of these functions are equivalent - - // when no return type is provided, it is assumed to be `()` - fun returns_unit_1() { } - - // there is an implicit () value in empty expression blocks - fun returns_unit_2(): () { } - - // explicit version of `returns_unit_1` and `returns_unit_2` - fun returns_unit_3(): () { () } - - - fun returns_3_values(): (u64, bool, address) { - (0, false, @0x42) - } - fun returns_4_values(x: &u64): (&u64, u8, u128, vector) { - (x, 0, 1, b"foobar") - } -} -} -``` - -## Operations - -The only operation that can be done on tuples currently is destructuring. - -### Destructuring - -For tuples of any size, they can be destructured in either a `let` binding or in an assignment. 
- -For example: - -```move -address 0x42 { -module example { - // all 3 of these functions are equivalent - fun returns_unit() {} - fun returns_2_values(): (bool, bool) { (true, false) } - fun returns_4_values(x: &u64): (&u64, u8, u128, vector) { (x, 0, 1, b"foobar") } - - fun examples(cond: bool) { - let () = (); - let (x, y): (u8, u64) = (0, 1); - let (a, b, c, d) = (@0x0, 0, false, b""); - - () = (); - (x, y) = if (cond) (1, 2) else (3, 4); - (a, b, c, d) = (@0x1, 1, true, b"1"); - } - - fun examples_with_function_calls() { - let () = returns_unit(); - let (x, y): (bool, bool) = returns_2_values(); - let (a, b, c, d) = returns_4_values(&0); - - () = returns_unit(); - (x, y) = returns_2_values(); - (a, b, c, d) = returns_4_values(&1); - } -} -} -``` - -For more details, see [Move Variables](./variables.md). - -## Subtyping - -Along with references, tuples are the only other type that have [subtyping](https://en.wikipedia.org/wiki/Subtyping) in Move. Tuples have -subtyping only in the sense that they subtype with references (in a covariant way). - -For example: - -```move -let x: &u64 = &0; -let y: &mut u64 = &mut 1; - -// (&u64, &mut u64) is a subtype of (&u64, &u64) -// since &mut u64 is a subtype of &u64 -let (a, b): (&u64, &u64) = (x, y); - -// (&mut u64, &mut u64) is a subtype of (&u64, &u64) -// since &mut u64 is a subtype of &u64 -let (c, d): (&u64, &u64) = (y, y); - -// error! (&u64, &mut u64) is NOT a subtype of (&mut u64, &mut u64) -// since &u64 is NOT a subtype of &mut u64 -let (e, f): (&mut u64, &mut u64) = (x, y); -``` - -## Ownership - -As mentioned above, tuple values don't really exist at runtime. And currently they cannot be stored -into local variables because of this (but it is likely that this feature will come soon). As such, -tuples can only be moved currently, as copying them would require putting them into a local variable -first. 
diff --git a/developer-docs-site/docs/move/book/unit-testing.md b/developer-docs-site/docs/move/book/unit-testing.md deleted file mode 100644 index 7c09df88d4b6f..0000000000000 --- a/developer-docs-site/docs/move/book/unit-testing.md +++ /dev/null @@ -1,227 +0,0 @@ -# Unit Tests - -Unit testing for Move adds three new annotations to the Move source language: - -* `#[test]` -* `#[test_only]`, and -* `#[expected_failure]`. - -They respectively mark a function as a test, mark a module or module member (`use`, function, or struct) as code to be included for testing only, and mark that a test is expected to fail. These annotations can be placed on a function with any visibility. Whenever a module or module member is annotated as `#[test_only]` or `#[test]`, it will not be included in the compiled bytecode unless it is compiled for testing. - -## Testing Annotations: Their Meaning and Usage - -Both the `#[test]` and `#[expected_failure]` annotations can be used either with or without arguments. - -Without arguments, the `#[test]` annotation can only be placed on a function with no parameters. This annotation simply marks this function as a test to be run by the unit testing harness. - -``` -#[test] // OK -fun this_is_a_test() { ... } - -#[test] // Will fail to compile since the test takes an argument -fun this_is_not_correct(arg: signer) { ... } -``` - -A test can also be annotated as an `#[expected_failure]`. This annotation marks that the test should is expected to raise an error. -You can ensure that a test is aborting with a specific abort `` in the module ``. -by annotating it with `#[expected_failure(abort_code = , location = )]`, -if it then fails with a different abort code, in a different module or with a non-abort error the test will fail. Note that `` can be `Self`(in the current module) or a qualified name, e.g. `vector::std`. -Only functions that have the `#[test]` annotation can also be annotated as an #`[expected_failure]`. 
- -``` -#[test] -#[expected_failure] -public fun this_test_will_abort_and_pass() { abort 1 } - -#[test] -#[expected_failure] -public fun test_will_error_and_pass() { 1/0; } - -#[test] -#[expected_failure(abort_code = 0, location = Self)] -public fun test_will_error_and_fail() { 1/0; } - -#[test, expected_failure] // Can have multiple in one attribute. This test will pass. -public fun this_other_test_will_abort_and_pass() { abort 1 } -``` - -With arguments, a test annotation takes the form `#[test( =
, ..., =
)]`. If a function is annotated in such a manner, the function's parameters must be a permutation of the parameters <`param_name_1>, ..., `, i.e., the order of these parameters as they occur in the function and their order in the test annotation do not have to be the same, but they must be able to be matched up with each other by name. - -Only parameters with a type of `signer` are supported as test parameters. If a non-`signer` parameter is supplied, the test will result in an error when run. - -``` -#[test(arg = @0xC0FFEE)] // OK -fun this_is_correct_now(arg: signer) { ... } - -#[test(wrong_arg_name = @0xC0FFEE)] // Not correct: arg name doesn't match -fun this_is_incorrect(arg: signer) { ... } - -#[test(a = @0xC0FFEE, b = @0xCAFE)] // OK. We support multiple signer arguments, but you must always provide a value for that argument -fun this_works(a: signer, b: signer) { ... } - -// somewhere a named address is declared -#[test_only] // test-only named addresses are supported -address TEST_NAMED_ADDR = @0x1; -... -#[test(arg = @TEST_NAMED_ADDR)] // Named addresses are supported! -fun this_is_correct_now(arg: signer) { ... } -``` - -A module and any of its members can be declared as test only. In such a case the item will only be included in the compiled Move bytecode when compiled in test mode. Additionally, when compiled outside of test mode, any non-test `use`s of a `#[test_only]` module will raise an error during compilation. - -``` -#[test_only] // test only attributes can be attached to modules -module abc { ... } - -#[test_only] // test only attributes can be attached to named addresses -address ADDR = @0x1; - -#[test_only] // .. to uses -use 0x1::some_other_module; - -#[test_only] // .. to structs -struct SomeStruct { ... } - -#[test_only] // .. and functions. Can only be called from test code, but not a test -fun test_only_function(...) { ... 
} -``` - -## Running Unit Tests - -Unit tests for a Move package can be run with the [`aptos move test` -command](./packages.md). - -When running tests, every test will either `PASS`, `FAIL`, or `TIMEOUT`. If a test case fails, the location of the failure along with the function name that caused the failure will be reported if possible. You can see an example of this below. - -A test will be marked as timing out if it exceeds the maximum number of instructions that can be executed for any single test. This bound can be changed using the options below, and its default value is set to 100000 instructions. Additionally, while the result of a test is always deterministic, tests are run in parallel by default, so the ordering of test results in a test run is non-deterministic unless running with only one thread (see `OPTIONS` below). - -There are also a number of options that can be passed to the unit testing binary to fine-tune testing and to help debug failing tests. These can be found using the help flag: - -``` -$ aptos move test -h -``` - -## Example - -A simple module using some of the unit testing features is shown in the following example: - -First create an empty package inside an empty directory: - -``` -$ aptos move init --name TestExample -``` - -Next add the following to the `Move.toml`: - -``` -[dependencies] -MoveStdlib = { git = "https://github.com/aptos-labs/aptos-core.git", subdir="aptos-move/framework/move-stdlib", rev = "main", addr_subst = { "std" = "0x1" } } -``` - -Next add the following module under the `sources` directory: - -``` -// filename: sources/my_module.move -module 0x1::my_module { - - struct MyCoin has key { value: u64 } - - public fun make_sure_non_zero_coin(coin: MyCoin): MyCoin { - assert!(coin.value > 0, 0); - coin - } - - public fun has_coin(addr: address): bool { - exists(addr) - } - - #[test] - fun make_sure_non_zero_coin_passes() { - let coin = MyCoin { value: 1 }; - let MyCoin { value: _ } = make_sure_non_zero_coin(coin); - } 
- - #[test] - // Or #[expected_failure] if we don't care about the abort code - #[expected_failure(abort_code = 0, location = Self)] - fun make_sure_zero_coin_fails() { - let coin = MyCoin { value: 0 }; - let MyCoin { value: _ } = make_sure_non_zero_coin(coin); - } - - #[test_only] // test only helper function - fun publish_coin(account: &signer) { - move_to(account, MyCoin { value: 1 }) - } - - #[test(a = @0x1, b = @0x2)] - fun test_has_coin(a: signer, b: signer) { - publish_coin(&a); - publish_coin(&b); - assert!(has_coin(@0x1), 0); - assert!(has_coin(@0x2), 1); - assert!(!has_coin(@0x3), 1); - } -} -``` - -### Running Tests - -You can then run these tests with the `aptos move test` command: - -``` -$ aptos move test -BUILDING MoveStdlib -BUILDING TestExample -Running Move unit tests -[ PASS ] 0x1::my_module::make_sure_non_zero_coin_passes -[ PASS ] 0x1::my_module::make_sure_zero_coin_fails -[ PASS ] 0x1::my_module::test_has_coin -Test result: OK. Total tests: 3; passed: 3; failed: 0 -``` - -### Using Test Flags - -#### `-f ` or `--filter ` -This will only run tests whose fully qualified name contains ``. For example if we wanted to only run tests with `"zero_coin"` in their name: - -``` -$ aptos move test -f zero_coin -CACHED MoveStdlib -BUILDING TestExample -Running Move unit tests -[ PASS ] 0x1::my_module::make_sure_non_zero_coin_passes -[ PASS ] 0x1::my_module::make_sure_zero_coin_fails -Test result: OK. Total tests: 2; passed: 2; failed: 0 -``` - -#### `--coverage` -This will compute code being covered by test cases and generate coverage summary. - -``` -$ aptos move test --coverage -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING TestExample -Running Move unit tests -[ PASS ] 0x1::my_module::make_sure_non_zero_coin_passes -[ PASS ] 0x1::my_module::make_sure_zero_coin_fails -[ PASS ] 0x1::my_module::test_has_coin -Test result: OK. 
Total tests: 3; passed: 3; failed: 0 -+-------------------------+ -| Move Coverage Summary | -+-------------------------+ -Module 0000000000000000000000000000000000000000000000000000000000000001::my_module ->>> % Module coverage: 100.00 -+-------------------------+ -| % Move Coverage: 100.00 | -+-------------------------+ -Please use `aptos move coverage -h` for more detailed source or bytecode test coverage of this package -``` - -Then by running `aptos move coverage`, we can get more detailed coverage information. These can be found using the help flag: - -``` -$ aptos move coverage -h -``` diff --git a/developer-docs-site/docs/move/book/uses.md b/developer-docs-site/docs/move/book/uses.md deleted file mode 100644 index 5e7edf8d9fc76..0000000000000 --- a/developer-docs-site/docs/move/book/uses.md +++ /dev/null @@ -1,359 +0,0 @@ -# Uses and Aliases - -The `use` syntax can be used to create aliases to members in other modules. `use` can be used to -create aliases that last either for the entire module, or for a given expression block scope. - -## Syntax - -There are several different syntax cases for `use`. Starting with the most simple, we have the -following for creating aliases to other modules - -```move -use
::; -use
:: as ; -``` - -For example - -```move -use std::vector; -use std::vector as V; -``` - -`use std::vector;` introduces an alias `vector` for `std::vector`. This means that anywhere you -would want to use the module name `std::vector` (assuming this `use` is in scope), you could use -`vector` instead. `use std::vector;` is equivalent to `use std::vector as vector;` - -Similarly `use std::vector as V;` would let you use `V` instead of `std::vector` - -```move= -use std::vector; -use std::vector as V; - -fun new_vecs(): (vector, vector, vector) { - let v1 = std::vector::empty(); - let v2 = vector::empty(); - let v3 = V::empty(); - (v1, v2, v3) -} -``` - -If you want to import a specific module member (such as a function, struct, or constant). You can -use the following syntax. - -```move -use
::::; -use
:::: as ; -``` - -For example - -```move -use std::vector::empty; -use std::vector::empty as empty_vec; -``` - -This would let you use the function `std::vector::empty` without full qualification. Instead you -could use `empty` and `empty_vec` respectively. Again, `use std::vector::empty;` is equivalent to -`use std::vector::empty as empty;` - -```move= -use std::vector::empty; -use std::vector::empty as empty_vec; - -fun new_vecs(): (vector, vector, vector) { - let v1 = std::vector::empty(); - let v2 = empty(); - let v3 = empty_vec(); - (v1, v2, v3) -} -``` - -If you want to add aliases for multiple module members at once, you can do so with the following -syntax - -```move -use
::::{, as ... }; -``` - -For example - -```move= -use std::vector::{push_back, length as len, pop_back}; - -fun swap_last_two(v: &mut vector) { - assert!(len(v) >= 2, 42); - let last = pop_back(v); - let second_to_last = pop_back(v); - push_back(v, last); - push_back(v, second_to_last) -} -``` - -If you need to add an alias to the Module itself in addition to module members, you can do that in a -single `use` using `Self`. `Self` is a member of sorts that refers to the module. - -```move -use std::vector::{Self, empty}; -``` - -For clarity, all of the following are equivalent: - -```move -use std::vector; -use std::vector as vector; -use std::vector::Self; -use std::vector::Self as vector; -use std::vector::{Self}; -use std::vector::{Self as vector}; -``` - -If needed, you can have as many aliases for any item as you like - -```move= -use std::vector::{ - Self, - Self as V, - length, - length as len, -}; - -fun pop_twice(v: &mut vector): (T, T) { - // all options available given the `use` above - assert!(vector::length(v) > 1, 42); - assert!(V::length(v) > 1, 42); - assert!(length(v) > 1, 42); - assert!(len(v) > 1, 42); - - (vector::pop_back(v), vector::pop_back(v)) -} -``` - -## Inside a `module` - -Inside of a `module` all `use` declarations are usable regardless of the order of declaration. - -```move= -address 0x42 { -module example { - use std::vector; - - fun example(): vector { - let v = empty(); - vector::push_back(&mut v, 0); - vector::push_back(&mut v, 10); - v - } - - use std::vector::empty; -} -} -``` - -The aliases declared by `use` in the module usable within that module. - -Additionally, the aliases introduced cannot conflict with other module members. 
See -[Uniqueness](#uniqueness) for more details - -## Inside an expression - -You can add `use` declarations to the beginning of any expression block - -```move= -address 0x42 { -module example { - - fun example(): vector { - use std::vector::{empty, push_back}; - - let v = empty(); - push_back(&mut v, 0); - push_back(&mut v, 10); - v - } -} -} -``` - -As with `let`, the aliases introduced by `use` in an expression block are removed at the end of that -block. - -```move= -address 0x42 { -module example { - - fun example(): vector { - let result = { - use std::vector::{empty, push_back}; - let v = empty(); - push_back(&mut v, 0); - push_back(&mut v, 10); - v - }; - result - } - -} -} -``` - -Attempting to use the alias after the block ends will result in an error - -```move= -fun example(): vector { - let result = { - use std::vector::{empty, push_back}; - let v = empty(); - push_back(&mut v, 0); - push_back(&mut v, 10); - v - }; - let v2 = empty(); // ERROR! -// ^^^^^ unbound function 'empty' - result -} -``` - -Any `use` must be the first item in the block. If the `use` comes after any expression or `let`, it -will result in a parsing error - -```move= -{ - let x = 0; - use std::vector; // ERROR! - let v = vector::empty(); -} -``` - -## Naming rules - -Aliases must follow the same rules as other module members. This means that aliases to structs or -constants must start with `A` to `Z` - -```move= -address 0x42 { -module data { - struct S {} - const FLAG: bool = false; - fun foo() {} -} -module example { - use 0x42::data::{ - S as s, // ERROR! - FLAG as fLAG, // ERROR! - foo as FOO, // valid - foo as bar, // valid - }; -} -} -``` - -## Uniqueness - -Inside a given scope, all aliases introduced by `use` declarations must be unique. - -For a module, this means aliases introduced by `use` cannot overlap - -```move= -address 0x42 { -module example { - - use std::vector::{empty as foo, length as foo}; // ERROR! 
- // ^^^ duplicate 'foo' - - use std::vector::empty as bar; - - use std::vector::length as bar; // ERROR! - // ^^^ duplicate 'bar' - -} -} -``` - -And, they cannot overlap with any of the module's other members - -```move= -address 0x42 { -module data { - struct S {} -} -module example { - use 0x42::data::S; - - struct S { value: u64 } // ERROR! - // ^ conflicts with alias 'S' above -} -} -``` - -Inside of an expression block, they cannot overlap with each other, but they can -[shadow](#shadowing) other aliases or names from an outer scope - -## Shadowing - -`use` aliases inside of an expression block can shadow names (module members or aliases) from the -outer scope. As with shadowing of locals, the shadowing ends at the end of the expression block; - -```move= -address 0x42 { -module example { - - struct WrappedVector { vec: vector } - - fun empty(): WrappedVector { - WrappedVector { vec: std::vector::empty() } - } - - fun example1(): (WrappedVector, WrappedVector) { - let vec = { - use std::vector::{empty, push_back}; - // 'empty' now refers to std::vector::empty - - let v = empty(); - push_back(&mut v, 0); - push_back(&mut v, 1); - push_back(&mut v, 10); - v - }; - // 'empty' now refers to Self::empty - - (empty(), WrappedVector { vec }) - } - - fun example2(): (WrappedVector, WrappedVector) { - use std::vector::{empty, push_back}; - let w: WrappedVector = { - use 0x42::example::empty; - empty() - }; - push_back(&mut w.vec, 0); - push_back(&mut w.vec, 1); - push_back(&mut w.vec, 10); - - let vec = empty(); - push_back(&mut vec, 0); - push_back(&mut vec, 1); - push_back(&mut vec, 10); - - (w, WrappedVector { vec }) - } -} -} -``` - -## Unused Use or Alias - -An unused `use` will result in an error - -```move= -address 0x42 { -module example { - use std::vector::{empty, push_back}; // ERROR! 
- // ^^^^^^^^^ unused alias 'push_back' - - fun example(): vector { - empty() - } -} -} -``` diff --git a/developer-docs-site/docs/move/book/variables.md b/developer-docs-site/docs/move/book/variables.md deleted file mode 100644 index 0a8ceded383f4..0000000000000 --- a/developer-docs-site/docs/move/book/variables.md +++ /dev/null @@ -1,756 +0,0 @@ -# Local Variables and Scope - -Local variables in Move are lexically (statically) scoped. New variables are introduced with the -keyword `let`, which will shadow any previous local with the same name. Locals are mutable and can -be updated both directly and via a mutable reference. - -## Declaring Local Variables - -### `let` bindings - -Move programs use `let` to bind variable names to values: - -```move -let x = 1; -let y = x + x: -``` - -`let` can also be used without binding a value to the local. - -```move -let x; -``` - -The local can then be assigned a value later. - -```move -let x; -if (cond) { - x = 1 -} else { - x = 0 -} -``` - -This can be very helpful when trying to extract a value from a loop when a default value cannot be -provided. - -```move -let x; -let cond = true; -let i = 0; -loop { - (x, cond) = foo(i); - if (!cond) break; - i = i + 1; -} -``` - -### Variables must be assigned before use - -Move's type system prevents a local variable from being used before it has been assigned. - -```move -let x; -x + x // ERROR! -``` - -```move -let x; -if (cond) x = 0; -x + x // ERROR! -``` - -```move -let x; -while (cond) x = 0; -x + x // ERROR! -``` - -### Valid variable names - -Variable names can contain underscores `_`, letters `a` to `z`, letters `A` to `Z`, and digits `0` -to `9`. Variable names must start with either an underscore `_` or a letter `a` through `z`. They -_cannot_ start with uppercase letters. - -```move -// all valid -let x = e; -let _x = e; -let _A = e; -let x0 = e; -let xA = e; -let foobar_123 = e; - -// all invalid -let X = e; // ERROR! -let Foo = e; // ERROR! 
-``` - -### Type annotations - -The type of a local variable can almost always be inferred by Move's type system. However, Move -allows explicit type annotations that can be useful for readability, clarity, or debuggability. The -syntax for adding a type annotation is: - -```move -let x: T = e; // "Variable x of type T is initialized to expression e" -``` - -Some examples of explicit type annotations: - -```move -address 0x42 { -module example { - - struct S { f: u64, g: u64 } - - fun annotated() { - let u: u8 = 0; - let b: vector = b"hello"; - let a: address = @0x0; - let (x, y): (&u64, &mut u64) = (&0, &mut 1); - let S { f, g: f2 }: S = S { f: 0, g: 1 }; - } -} -} -``` - -Note that the type annotations must always be to the right of the pattern: - -```move -let (x: &u64, y: &mut u64) = (&0, &mut 1); // ERROR! should be let (x, y): ... = -``` - -### When annotations are necessary - -In some cases, a local type annotation is required if the type system cannot infer the type. This -commonly occurs when the type argument for a generic type cannot be inferred. For example: - -```move -let _v1 = vector::empty(); // ERROR! -// ^^^^^^^^^^^^^^^ Could not infer this type. Try adding an annotation -let v2: vector = vector::empty(); // no error -``` - -In a rarer case, the type system might not be able to infer a type for divergent code (where all the -following code is unreachable). Both `return` and [`abort`](./abort-and-assert.md) are expressions -and can have any type. A [`loop`](./loops.md) has type `()` if it has a `break`, but if there is no -break out of the `loop`, it could have any type. If these types cannot be inferred, a type -annotation is required. For example, this code: - -```move -let a: u8 = return (); -let b: bool = abort 0; -let c: signer = loop (); - -let x = return (); // ERROR! -// ^ Could not infer this type. Try adding an annotation -let y = abort 0; // ERROR! -// ^ Could not infer this type. Try adding an annotation -let z = loop (); // ERROR! 
-// ^ Could not infer this type. Try adding an annotation -``` - -Adding type annotations to this code will expose other errors about dead code or unused local -variables, but the example is still helpful for understanding this problem. - -### Multiple declarations with tuples - -`let` can introduce more than one local at a time using tuples. The locals declared inside the -parenthesis are initialized to the corresponding values from the tuple. - -```move -let () = (); -let (x0, x1) = (0, 1); -let (y0, y1, y2) = (0, 1, 2); -let (z0, z1, z2, z3) = (0, 1, 2, 3); -``` - -The type of the expression must match the arity of the tuple pattern exactly. - -```move -let (x, y) = (0, 1, 2); // ERROR! -let (x, y, z, q) = (0, 1, 2); // ERROR! -``` - -You cannot declare more than one local with the same name in a single `let`. - -```move -let (x, x) = 0; // ERROR! -``` - -### Multiple declarations with structs - -`let` can also introduce more than one local at a time when destructuring (or matching against) a -struct. In this form, the `let` creates a set of local variables that are initialized to the values -of the fields from a struct. The syntax looks like this: - -```move -struct T { f1: u64, f2: u64 } -``` - -```move -let T { f1: local1, f2: local2 } = T { f1: 1, f2: 2 }; -// local1: u64 -// local2: u64 -``` - -Here is a more complicated example: - -```move -address 0x42 { -module example { - struct X { f: u64 } - struct Y { x1: X, x2: X } - - fun new_x(): X { - X { f: 1 } - } - - fun example() { - let Y { x1: X { f }, x2 } = Y { x1: new_x(), x2: new_x() }; - assert!(f + x2.f == 2, 42); - - let Y { x1: X { f: f1 }, x2: X { f: f2 } } = Y { x1: new_x(), x2: new_x() }; - assert!(f1 + f2 == 2, 42); - } -} -} -``` - -Fields of structs can serve double duty, identifying the field to bind _and_ the name of the -variable. This is sometimes referred to as punning. 
- -```move -let X { f } = e; -``` - -is equivalent to: - -```move -let X { f: f } = e; -``` - -As shown with tuples, you cannot declare more than one local with the same name in a single `let`. - -```move -let Y { x1: x, x2: x } = e; // ERROR! -``` - -### Destructuring against references - -In the examples above for structs, the bound value in the let was moved, destroying the struct value -and binding its fields. - -```move -struct T { f1: u64, f2: u64 } -``` - -```move -let T { f1: local1, f2: local2 } = T { f1: 1, f2: 2 }; -// local1: u64 -// local2: u64 -``` - -In this scenario the struct value `T { f1: 1, f2: 2 }` no longer exists after the `let`. - -If you wish instead to not move and destroy the struct value, you can borrow each of its fields. For -example: - -```move -let t = T { f1: 1, f2: 2 }; -let T { f1: local1, f2: local2 } = &t; -// local1: &u64 -// local2: &u64 -``` - -And similarly with mutable references: - -```move -let t = T { f1: 1, f2: 2 }; -let T { f1: local1, f2: local2 } = &mut t; -// local1: &mut u64 -// local2: &mut u64 -``` - -This behavior can also work with nested structs. - -```move -address 0x42 { -module example { - struct X { f: u64 } - struct Y { x1: X, x2: X } - - fun new_x(): X { - X { f: 1 } - } - - fun example() { - let y = Y { x1: new_x(), x2: new_x() }; - - let Y { x1: X { f }, x2 } = &y; - assert!(*f + x2.f == 2, 42); - - let Y { x1: X { f: f1 }, x2: X { f: f2 } } = &mut y; - *f1 = *f1 + 1; - *f2 = *f2 + 1; - assert!(*f1 + *f2 == 4, 42); - } -} -} -``` - -### Ignoring Values - -In `let` bindings, it is often helpful to ignore some values. Local variables that start with `_` -will be ignored and not introduce a new variable - -```move -fun three(): (u64, u64, u64) { - (0, 1, 2) -} -``` - -```move -let (x1, _, z1) = three(); -let (x2, _y, z2) = three(); -assert!(x1 + z1 == x2 + z2, 42); -``` - -This can be necessary at times as the compiler will error on unused local variables - -```move -let (x1, y, z1) = three(); // ERROR! 
-// ^ unused local 'y' -``` - -### General `let` grammar - -All of the different structures in `let` can be combined! With that we arrive at this general -grammar for `let` statements: - -> _let-binding_ → **let** _pattern-or-list_ _type-annotation__opt_ _initializer__opt_ - -> _pattern-or-list_ → _pattern_ | **(** _pattern-list_ **)** - -> _pattern-list_ → _pattern_ **,**_opt_ | _pattern_ **,** _pattern-list_ - -> _type-annotation_ → **:** _type_ - -> _initializer_ → **=** _expression_ - -The general term for the item that introduces the bindings is a _pattern_. The pattern serves to -both destructure data (possibly recursively) and introduce the bindings. The pattern grammar is as -follows: - -> _pattern_ → _local-variable_ | _struct-type_ **{** _field-binding-list_ **}** - -> _field-binding-list_ → _field-binding_ **,**_opt_ | _field-binding_ **,** _field-binding-list_ - -> _field-binding_ → _field_ | _field_ **:** _pattern_ - -A few concrete examples with this grammar applied: - -```move - let (x, y): (u64, u64) = (0, 1); -// ^ local-variable -// ^ pattern -// ^ local-variable -// ^ pattern -// ^ pattern-list -// ^^^^ pattern-list -// ^^^^^^ pattern-or-list -// ^^^^^^^^^^^^ type-annotation -// ^^^^^^^^ initializer -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ let-binding - - let Foo { f, g: x } = Foo { f: 0, g: 1 }; -// ^^^ struct-type -// ^ field -// ^ field-binding -// ^ field -// ^ local-variable -// ^ pattern -// ^^^^ field-binding -// ^^^^^^^ field-binding-list -// ^^^^^^^^^^^^^^^ pattern -// ^^^^^^^^^^^^^^^ pattern-or-list -// ^^^^^^^^^^^^^^^^^^^^ initializer -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ let-binding -``` - -## Mutations - -### Assignments - -After the local is introduced (either by `let` or as a function parameter), the local can be -modified via an assignment: - -```move -x = e -``` - -Unlike `let` bindings, assignments are expressions. 
In some languages, assignments return the value -that was assigned, but in Move, the type of any assignment is always `()`. - -```move -(x = e: ()) -``` - -Practically, assignments being expressions means that they can be used without adding a new -expression block with braces (`{`...`}`). - -```move -let x = 0; -if (cond) x = 1 else x = 2; -``` - -The assignment uses the same pattern syntax scheme as `let` bindings: - -```move -address 0x42 { -module example { - struct X { f: u64 } - - fun new_x(): X { - X { f: 1 } - } - - // This example will complain about unused variables and assignments. - fun example() { - let (x, _, z) = (0, 1, 3); - let (x, y, f, g); - - (X { f }, X { f: x }) = (new_x(), new_x()); - assert!(f + x == 2, 42); - - (x, y, z, f, _, g) = (0, 0, 0, 0, 0, 0); - } -} -} -``` - -Note that a local variable can only have one type, so the type of the local cannot change between -assignments. - -```move -let x; -x = 0; -x = false; // ERROR! -``` - -### Mutating through a reference - -In addition to directly modifying a local with assignment, a local can be modified via a mutable -reference `&mut`. - -```move -let x = 0; -let r = &mut x; -*r = 1; -assert!(x == 1, 42); -``` - -This is particularly useful if either: - -(1) You want to modify different variables depending on some condition. - -```move -let x = 0; -let y = 1; -let r = if (cond) &mut x else &mut y; -*r = *r + 1; -``` - -(2) You want another function to modify your local value. - -```move -let x = 0; -modify_ref(&mut x); -``` - -This sort of modification is how you modify structs and vectors! - -```move -let v = vector::empty(); -vector::push_back(&mut v, 100); -assert!(*vector::borrow(&v, 0) == 100, 42); -``` - -For more details, see [Move references](./references.md). - -## Scopes - -Any local declared with `let` is available for any subsequent expression, _within that scope_. -Scopes are declared with expression blocks, `{`...`}`. - -Locals cannot be used outside of the declared scope. 
- -```move -let x = 0; -{ - let y = 1; -}; -x + y // ERROR! -// ^ unbound local 'y' -``` - -But, locals from an outer scope _can_ be used in a nested scope. - -```move -{ - let x = 0; - { - let y = x + 1; // valid - } -} -``` - -Locals can be mutated in any scope where they are accessible. That mutation survives with the local, -regardless of the scope that performed the mutation. - -```move -let x = 0; -x = x + 1; -assert!(x == 1, 42); -{ - x = x + 1; - assert!(x == 2, 42); -}; -assert!(x == 2, 42); -``` - -### Expression Blocks - -An expression block is a series of statements separated by semicolons (`;`). The resulting value of -an expression block is the value of the last expression in the block. - -```move -{ let x = 1; let y = 1; x + y } -``` - -In this example, the result of the block is `x + y`. - -A statement can be either a `let` declaration or an expression. Remember that assignments (`x = e`) -are expressions of type `()`. - -```move -{ let x; let y = 1; x = 1; x + y } -``` - -Function calls are another common expression of type `()`. Function calls that modify data are -commonly used as statements. - -```move -{ let v = vector::empty(); vector::push_back(&mut v, 1); v } -``` - -This is not just limited to `()` types---any expression can be used as a statement in a sequence! - -```move -{ - let x = 0; - x + 1; // value is discarded - x + 2; // value is discarded - b"hello"; // value is discarded -} -``` - -But! If the expression contains a resource (a value without the `drop` [ability](./abilities.md)), -you will get an error. This is because Move's type system guarantees that any value that is dropped -has the `drop` [ability](./abilities.md). (Ownership must be transferred or the value must be -explicitly destroyed within its declaring module.) - -```move -{ - let x = 0; - Coin { value: x }; // ERROR! 
-// ^^^^^^^^^^^^^^^^^ unused value without the `drop` ability - x -} -``` - -If a final expression is not present in a block---that is, if there is a trailing semicolon `;`, -there is an implicit [unit `()` value](https://en.wikipedia.org/wiki/Unit_type). Similarly, if the expression block is empty, there is an -implicit unit `()` value. - -```move -// Both are equivalent -{ x = x + 1; 1 / x; } -{ x = x + 1; 1 / x; () } -``` - -```move -// Both are equivalent -{ } -{ () } -``` - -An expression block is itself an expression and can be used anyplace an expression is used. (Note: -The body of a function is also an expression block, but the function body cannot be replaced by -another expression.) - -```move -let my_vector: vector> = { - let v = vector::empty(); - vector::push_back(&mut v, b"hello"); - vector::push_back(&mut v, b"goodbye"); - v -}; -``` - -(The type annotation is not needed in this example and only added for clarity.) - -### Shadowing - -If a `let` introduces a local variable with a name already in scope, that previous variable can no -longer be accessed for the rest of this scope. This is called _shadowing_. - -```move -let x = 0; -assert!(x == 0, 42); - -let x = 1; // x is shadowed -assert!(x == 1, 42); -``` - -When a local is shadowed, it does not need to retain the same type as before. - -```move -let x = 0; -assert!(x == 0, 42); - -let x = b"hello"; // x is shadowed -assert!(x == b"hello", 42); -``` - -After a local is shadowed, the value stored in the local still exists, but will no longer be -accessible. This is important to keep in mind with values of types without the -[`drop` ability](./abilities.md), as ownership of the value must be transferred by the end of the -function. - -```move -address 0x42 { - module example { - struct Coin has store { value: u64 } - - fun unused_resource(): Coin { - let x = Coin { value: 0 }; // ERROR! 
-// ^ This local still contains a value without the `drop` ability - x.value = 1; - let x = Coin { value: 10 }; - x -// ^ Invalid return - } - } -} -``` - -When a local is shadowed inside a scope, the shadowing only remains for that scope. The shadowing is -gone once that scope ends. - -```move -let x = 0; -{ - let x = 1; - assert!(x == 1, 42); -}; -assert!(x == 0, 42); -``` - -Remember, locals can change type when they are shadowed. - -```move -let x = 0; -{ - let x = b"hello"; - assert!(x = b"hello", 42); -}; -assert!(x == 0, 42); -``` - -## Move and Copy - -All local variables in Move can be used in two ways, either by `move` or `copy`. If one or the other -is not specified, the Move compiler is able to infer whether a `copy` or a `move` should be used. -This means that in all of the examples above, a `move` or a `copy` would be inserted by the -compiler. A local variable cannot be used without the use of `move` or `copy`. - -`copy` will likely feel the most familiar coming from other programming languages, as it creates a -new copy of the value inside of the variable to use in that expression. With `copy`, the local -variable can be used more than once. - -```move -let x = 0; -let y = copy x + 1; -let z = copy x + 2; -``` - -Any value with the `copy` [ability](./abilities.md) can be copied in this way. - -`move` takes the value out of the local variable _without_ copying the data. After a `move` occurs, -the local variable is unavailable. - -```move -let x = 1; -let y = move x + 1; -// ------ Local was moved here -let z = move x + 2; // Error! -// ^^^^^^ Invalid usage of local 'x' -y + z -``` - -### Safety - -Move's type system will prevent a value from being used after it is moved. This is the same safety -check described in [`let` declaration](#let-bindings) that prevents local variables from being used -before it is assigned a value. - - - -### Inference - -As mentioned above, the Move compiler will infer a `copy` or `move` if one is not indicated. 
The -algorithm for doing so is quite simple: - -- Any value with the `copy` [ability](./abilities.md) is given a `copy`. -- Any reference (both mutable `&mut` and immutable `&`) is given a `copy`. - - Except under special circumstances where it is made a `move` for predictable borrow checker - errors. -- Any other value is given a `move`. -- If the compiler can prove that the source value with copy ability is not used after the - assignment, then a move may be used instead of a copy for performance, but this will be invisible - to the programmer (except in possible decreased time or gas cost). - -For example: - -```move -struct Foo { - f: u64 -} - -struct Coin has copy { - value: u64 -} - -let s = b"hello"; -let foo = Foo { f: 0 }; -let coin = Coin { value: 0 }; - -let s2 = s; // copy -let foo2 = foo; // move -let coin2 = coin; // copy - -let x = 0; -let b = false; -let addr = @0x42; -let x_ref = &x; -let coin_ref = &mut coin2; - -let x2 = x; // copy -let b2 = b; // copy -let addr2 = @0x42; // copy -let x_ref2 = x_ref; // copy -let coin_ref2 = coin_ref; // copy -``` diff --git a/developer-docs-site/docs/move/book/vector.md b/developer-docs-site/docs/move/book/vector.md deleted file mode 100644 index 95d7474262cf9..0000000000000 --- a/developer-docs-site/docs/move/book/vector.md +++ /dev/null @@ -1,168 +0,0 @@ -# Vector - -`vector` is the only primitive collection type provided by Move. A `vector` is a homogenous -collection of `T`'s that can grow or shrink by pushing/popping values off the "end". - -A `vector` can be instantiated with any type `T`. For example, `vector`, `vector
`, -`vector<0x42::MyModule::MyResource>`, and `vector>` are all valid vector types. - -## Literals - -### General `vector` Literals - -Vectors of any type can be created with `vector` literals. - -| Syntax | Type | Description | -| --------------------- | ----------------------------------------------------------------------------- | ------------------------------------------ | -| `vector[]` | `vector[]: vector` where `T` is any single, non-reference type | An empty vector | -| `vector[e1, ..., en]` | `vector[e1, ..., en]: vector` where `e_i: T` s.t. `0 < i <= n` and `n > 0` | A vector with `n` elements (of length `n`) | - -In these cases, the type of the `vector` is inferred, either from the element type or from the -vector's usage. If the type cannot be inferred, or simply for added clarity, the type can be -specified explicitly: - -```move -vector[]: vector -vector[e1, ..., en]: vector -``` - -#### Example Vector Literals - -```move -(vector[]: vector); -(vector[0u8, 1u8, 2u8]: vector); -(vector[]: vector); -(vector
[@0x42, @0x100]: vector
); -``` - -### `vector` literals - -A common use-case for vectors in Move is to represent "byte arrays", which are represented with -`vector`. These values are often used for cryptographic purposes, such as a public key or a hash -result. These values are so common that specific syntax is provided to make the values more -readable, as opposed to having to use `vector[]` where each individual `u8` value is specified in -numeric form. - -There are currently two supported types of `vector` literals, *byte strings* and *hex strings*. - -#### Byte Strings - -Byte strings are quoted string literals prefixed by a `b`, e.g. `b"Hello!\n"`. - -These are ASCII encoded strings that allow for escape sequences. Currently, the supported escape -sequences are: - -| Escape Sequence | Description | -| --------------- | ---------------------------------------------- | -| `\n` | New line (or Line feed) | -| `\r` | Carriage return | -| `\t` | Tab | -| `\\` | Backslash | -| `\0` | Null | -| `\"` | Quote | -| `\xHH` | Hex escape, inserts the hex byte sequence `HH` | - -#### Hex Strings - -Hex strings are quoted string literals prefixed by a `x`, e.g. `x"48656C6C6F210A"`. - -Each byte pair, ranging from `00` to `FF`, is interpreted as hex encoded `u8` value. So each byte -pair corresponds to a single entry in the resulting `vector`. - -#### Example String Literals - -```move -script { -fun byte_and_hex_strings() { - assert!(b"" == x"", 0); - assert!(b"Hello!\n" == x"48656C6C6F210A", 1); - assert!(b"\x48\x65\x6C\x6C\x6F\x21\x0A" == x"48656C6C6F210A", 2); - assert!( - b"\"Hello\tworld!\"\n \r \\Null=\0" == - x"2248656C6C6F09776F726C6421220A200D205C4E756C6C3D00", - 3 - ); -} -} -``` - -## Operations - -`vector` provides several operations via the `std::vector` module in the Move standard -library, as shown below. More operations may be added over time. 
-Up-to-date document on `vector` can be found [here](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/move-stdlib/doc/vector.md#0x1_vector). - - -| Function | Description | Aborts? | -|--------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------| -| `vector::empty(): vector` | Create an empty vector that can store values of type `T` | Never | -| `vector::is_empty(): bool` | Return `true` if the vector `v` has no elements and `false` otherwise. | Never | -| `vector::singleton(t: T): vector` | Create a vector of size 1 containing `t` | Never | -| `vector::length(v: &vector): u64` | Return the length of the vector `v` | Never | -| `vector::push_back(v: &mut vector, t: T)` | Add `t` to the end of `v` | Never | -| `vector::pop_back(v: &mut vector): T` | Remove and return the last element in `v` | If `v` is empty | -| `vector::borrow(v: &vector, i: u64): &T` | Return an immutable reference to the `T` at index `i` | If `i` is not in bounds | -| `vector::borrow_mut(v: &mut vector, i: u64): &mut T` | Return a mutable reference to the `T` at index `i` | If `i` is not in bounds | -| `vector::destroy_empty(v: vector)` | Delete `v` | If `v` is not empty | -| `vector::append(v1: &mut vector, v2: vector)` | Add the elements in `v2` to the end of `v1` | Never | -| `vector::reverse_append(lhs: &mut vector, other: vector)` | Pushes all of the elements of the `other` vector into the `lhs` vector, in the reverse order as they occurred in `other` | Never | -| `vector::contains(v: &vector, e: &T): bool` | Return true if `e` is in the vector `v`. 
Otherwise, returns false | Never | -| `vector::swap(v: &mut vector, i: u64, j: u64)` | Swaps the elements at the `i`th and `j`th indices in the vector `v` | If `i` or `j` is out of bounds | -| `vector::reverse(v: &mut vector)` | Reverses the order of the elements in the vector `v` in place | Never | -| `vector::reverse_slice(v: &mut vector, l: u64, r: u64)` | Reverses the order of the elements [l, r) in the vector `v` in place | Never | -| `vector::index_of(v: &vector, e: &T): (bool, u64)` | Return `(true, i)` if `e` is in the vector `v` at index `i`. Otherwise, returns `(false, 0)` | Never | -| `vector::insert(v: &mut vector, i: u64, e: T)` | Insert a new element `e` at position 0 <= i <= length, using O(length - i) time | If `i` is out of bounds | -| `vector::remove(v: &mut vector, i: u64): T` | Remove the `i`th element of the vector `v`, shifting all subsequent elements. This is O(n) and preserves ordering of elements in the vector | If `i` is out of bounds | -| `vector::swap_remove(v: &mut vector, i: u64): T` | Swap the `i`th element of the vector `v` with the last element and then pop the element, This is O(1), but does not preserve ordering of elements in the vector | If `i` is out of bounds | -| `vector::trim(v: &mut vector, new_len: u64): u64` | Trim the vector `v` to the smaller size `new_len` and return the evicted elements in order | `new_len` is larger than the length of `v` | -| `vector::trim_reverse(v: &mut vector, new_len: u64): u64` | Trim the vector `v` to the smaller size `new_len` and return the evicted elements in the reverse order | `new_len` is larger than the length of `v` | -| `vector::rotate(v: &mut vector, rot: u64): u64` | rotate(&mut [1, 2, 3, 4, 5], 2) -> [3, 4, 5, 1, 2] in place, returns the split point ie. 
3 in this example | Never | -| `vector::rotate_slice(v: &mut vector, left: u64, rot: u64, right: u64): u64` | rotate a slice [left, right) with left <= rot <= right in place, returns the split point | Never | - -## Example - -```move -use std::vector; - -let v = vector::empty(); -vector::push_back(&mut v, 5); -vector::push_back(&mut v, 6); - -assert!(*vector::borrow(&v, 0) == 5, 42); -assert!(*vector::borrow(&v, 1) == 6, 42); -assert!(vector::pop_back(&mut v) == 6, 42); -assert!(vector::pop_back(&mut v) == 5, 42); -``` - -## Destroying and copying `vector`s - -Some behaviors of `vector` depend on the abilities of the element type, `T`. For example, vectors -containing elements that do not have `drop` cannot be implicitly discarded like `v` in the example -above--they must be explicitly destroyed with `vector::destroy_empty`. - -Note that `vector::destroy_empty` will abort at runtime unless `vec` contains zero elements: - -```move -fun destroy_any_vector(vec: vector) { - vector::destroy_empty(vec) // deleting this line will cause a compiler error -} -``` - -But no error would occur for dropping a vector that contains elements with `drop`: - -```move -fun destroy_droppable_vector(vec: vector) { - // valid! - // nothing needs to be done explicitly to destroy the vector -} -``` - -Similarly, vectors cannot be copied unless the element type has `copy`. In other words, a -`vector` has `copy` if and only if `T` has `copy`. - -For more details see the sections on [type abilities](./abilities.md) and [generics](./generics.md). - -## Ownership - -As mentioned [above](#destroying-and-copying-vectors), `vector` values can be copied only if the -elements can be copied. 
\ No newline at end of file diff --git a/developer-docs-site/docs/move/move-on-aptos.md b/developer-docs-site/docs/move/move-on-aptos.md deleted file mode 100644 index 899c76567da85..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -title: "Move on Aptos" ---- - -# Learn the Move Language - -To begin your journey in developing Move, we provide the following resources: - -- [Your first move module](../tutorials/first-move-module.md) to walks you through compiling, deploying, and interacting with Move. -- [Aptos Move Book](./book/SUMMARY.md) to teach you many of the general Move concepts. -- [The Move tutorial](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/move-tutorial) to cover the basics of programming with Move. -- [Aptos Move Examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples) demonstrating many different aspects of Move especially those unique to Aptos. -- [Aptos Move Framework](../reference/move.md). -- [Move language channel](https://discord.com/channels/945856774056083548/955573698868432896) in [Aptos Discord](https://discord.gg/aptosnetwork). - -There are several IDE plugins available for Aptos and the Move language: - -- [Aptos Move Analyzer](https://marketplace.visualstudio.com/items?itemName=MoveBit.aptos-move-analyzer) for Visual Studio.. -- [Remix IDE Plugin](../community/contributions/remix-ide-plugin.md): Offers a web-based development environment. It is a no-setup tool with a graphical interface for developing Move modules. -- [Move language plugin for Jetbrains IDEs](https://plugins.jetbrains.com/plugin/14721-move-language): Supports syntax highlighting, code navigation, renames, formatting, type checks and code generation. - -The following external resources exist to further your education: - -* [Teach yourself Move on Aptos](https://github.com/econia-labs/teach-yourself-move). 
-* [Formal Verification, the Move Language, and the Move Prover](https://www.certik.com/resources/blog/2wSOZ3mC55AB6CYol6Q2rP-formal-verification-the-move-language-and-the-move-prover) -* [IMCODING Move Tutorials](https://www.imcoding.online/tutorials?tag=Aptos) -* [Pontem Move Playground](https://playground.pontem.network/) -* [Collection of nestable Move resources](https://github.com/taoheorg/taohe) -* [Move-Lang tag on Stack Overflow](https://stackoverflow.com/questions/tagged/move-lang) diff --git a/developer-docs-site/docs/move/move-on-aptos/cli.md b/developer-docs-site/docs/move/move-on-aptos/cli.md deleted file mode 100644 index ba01baa257b8c..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/cli.md +++ /dev/null @@ -1,1542 +0,0 @@ ---- -title: "Aptos CLI" ---- - -import CodeBlock from '@theme/CodeBlock'; - -# Use the Aptos CLI - -The `aptos` tool is a command line interface (CLI) for developing on the Aptos blockchain, debugging, and for node operations. This document describes how to use the `aptos` CLI tool. To download or build the CLI, follow [Install Aptos CLI](../../tools/aptos-cli/install-cli/index.md). - -## Compiling Move - -The `aptos` CLI can be used to compile a Move package locally. -The below example uses the `HelloBlockchain` in [move-examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples). - -The named addresses can be either an account address, or a profile name. - -```bash -$ aptos move compile --package-dir aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=superuser -``` - -The above command will generate the below terminal output: -```bash -{ - "Result": [ - "742854F7DCA56EA6309B51E8CEBB830B12623F9C9D76C72C3242E4CAD353DEDC::Message" - ] -} -``` - -## Compiling and unit testing Move - -The `aptos` CLI can also be used to compile and run unit tests locally. 
-In this example, we'll use the `HelloBlockchain` in [move-examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples). - -```bash -$ aptos move test --package-dir aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=superuser -``` -The above command will generate the following terminal output: -```bash -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING Examples -Running Move unit tests -[ PASS ] 0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc::MessageTests::sender_can_set_message -[ PASS ] 0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc::Message::sender_can_set_message -Test result: OK. Total tests: 2; passed: 2; failed: 0 -{ - "Result": "Success" -} -``` -## Generating test coverage details for Move -The `aptos` CLI can be used to analyze and improve the testing of your Move modules. To use this feature: -1. In your `aptos-core` source checkout, navigate to the `aptos-move/framework/move-stdlib` directory. -2. Execute the command: - ```bash - $ aptos move test --coverage - ``` -3. Receive results in standard output containing the result for each test case followed by a basic coverage summary resembling: - ```bash - BUILDING MoveStdlib -Running Move unit tests -[ PASS ] 0x1::vector_tests::append_empties_is_empty -[ PASS ] 0x1::option_tests::borrow_mut_none -[ PASS ] 0x1::fixed_point32_tests::ceil_can_round_up_correctly -[ PASS ] 0x1::features::test_change_feature_txn -[ PASS ] 0x1::bcs_tests::bcs_bool -[ PASS ] 0x1::bit_vector_tests::empty_bitvector -[ PASS ] 0x1::option_tests::borrow_mut_some -Test result: OK. 
Total tests: 149; passed: 149; failed: 0 -+-------------------------+ -| Move Coverage Summary | -+-------------------------+ -Module 0000000000000000000000000000000000000000000000000000000000000001::bcs ->>> % Module coverage: NaN -Module 0000000000000000000000000000000000000000000000000000000000000001::fixed_point32 ->>> % Module coverage: 100.00 -Module 0000000000000000000000000000000000000000000000000000000000000001::hash ->>> % Module coverage: NaN -Module 0000000000000000000000000000000000000000000000000000000000000001::vector ->>> % Module coverage: 92.19 -Module 0000000000000000000000000000000000000000000000000000000000000001::error ->>> % Module coverage: 0.00 -Module 0000000000000000000000000000000000000000000000000000000000000001::acl ->>> % Module coverage: 0.00 -Module 0000000000000000000000000000000000000000000000000000000000000001::bit_vector ->>> % Module coverage: 97.32 -Module 0000000000000000000000000000000000000000000000000000000000000001::signer ->>> % Module coverage: 100.00 -Module 0000000000000000000000000000000000000000000000000000000000000001::features ->>> % Module coverage: 69.41 -Module 0000000000000000000000000000000000000000000000000000000000000001::option ->>> % Module coverage: 100.00 -Module 0000000000000000000000000000000000000000000000000000000000000001::string ->>> % Module coverage: 81.82 -+-------------------------+ -| % Move Coverage: 83.50 | -+-------------------------+ -Please use `aptos move coverage -h` for more detailed test coverage of this package -{ - "Result": "Success" -} - ``` - -4. 
Optionally, narrow down your test runs and results to a specific package name with the `--filter` option, like so: - ```bash - $ aptos move test --coverage --filter vector - ``` - - With results like: - ``` - BUILDING MoveStdlib - Running Move unit tests - [ PASS ] 0x1::bit_vector_tests::empty_bitvector - [ PASS ] 0x1::vector_tests::append_empties_is_empty - [ PASS ] 0x1::bit_vector_tests::index_bit_out_of_bounds - [ PASS ] 0x1::vector_tests::append_respects_order_empty_lhs - ``` -5. Run the `aptos move coverage` command to obtain more detailed coverage information. -6. Optionally, isolate the results to a module by passing its name to the `--module` option, for example: - ```bash - $ aptos move coverage source --module signer - ``` - - With results: - ``` - module std::signer { - // Borrows the address of the signer - // Conceptually, you can think of the `signer` as being a struct wrapper arround an - // address - // ``` - // struct signer has drop { addr: address } - // ``` - // `borrow_address` borrows this inner field - native public fun borrow_address(s: &signer): &address; - - // Copies the address of the signer - public fun address_of(s: &signer): address { - *borrow_address(s) - } - - /// Return true only if `s` is a transaction signer. This is a spec function only available in spec. - spec native fun is_txn_signer(s: signer): bool; - - /// Return true only if `a` is a transaction signer address. This is a spec function only available in spec. - spec native fun is_txn_signer_addr(a: address): bool; -} -{ - "Result": "Success" -} - ``` -6. Find failures and iteratively improve your testing and running these commands to eliminate gaps in your testing coverage. - -## Proving Move - -The `aptos` CLI can be used to run [Move Prover](../../move/prover/index.md), which is a formal verification tool for the Move language. 
The below example proves the `hello_prover` package in [move-examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples). -```bash -aptos move prove --package-dir aptos-move/move-examples/hello_prover/ -``` -The above command will generate the following terminal output: -```bash -SUCCESS proving 1 modules from package `hello_prover` in 1.649s -{ - "Result": "Success" -} -``` - -Move Prover may fail with the following terminal output if the dependencies are not installed and set up properly: -```bash -FAILURE proving 1 modules from package `hello_prover` in 0.067s -{ - "Error": "Move Prover failed: No boogie executable set. Please set BOOGIE_EXE" -} -``` -In this case, see [Install the dependencies of Move Prover](../../tools/aptos-cli/install-cli/index.md#step-3-optional-install-the-dependencies-of-move-prover). - -## Profiling gas usage - -Optionally, you can profile gas usage in the Aptos virtual machine locally rather than [simulating transactions](../../concepts/gas-txn-fee.md#estimating-the-gas-units-via-simulation) at the [fullnode](https://fullnode.devnet.aptoslabs.com/v1/spec#/operations/simulate_transaction). -This will generate a web-based gas report which you can view with your browser. - -To run the gas profiler, simply append the `--profile-gas` option to the Aptos CLI `move publish`, `move run` or `move run-script` command. -See [Gas Profiling](gas-profiling) for a full tutorial. - -## Debugging and printing stack trace - -In this example, we will use `DebugDemo` in [debug-move-example](https://github.com/aptos-labs/aptos-core/tree/main/crates/aptos/debug-move-example). - -Now, you can use `debug::print` and `debug::print_stack_trace` in your [DebugDemo Move file](https://github.com/aptos-labs/aptos-core/tree/main/crates/aptos/debug-move-example/sources/DebugDemo.move). 
- -You can run the following command: -```bash -$ aptos move test --package-dir crates/aptos/debug-move-example -``` - -The command will generate the following output: -```bash -Running Move unit tests -[debug] 0000000000000000000000000000000000000000000000000000000000000001 -Call Stack: - [0] 0000000000000000000000000000000000000000000000000000000000000001::Message::sender_can_set_message - - Code: - [4] CallGeneric(0) - [5] MoveLoc(0) - [6] LdConst(0) - > [7] Call(1) - [8] Ret - - Locals: - [0] - - [1] 0000000000000000000000000000000000000000000000000000000000000001 - - -Operand Stack: -``` - - -## Publishing a Move package with a named address - -In this example, we'll use the `HelloBlockchain` in [move-examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples). - -Publish the package with your account address set for `HelloBlockchain`. - -Here, you need to change 8946741e5c907c43c9e042b3739993f32904723f8e2d1491564d38959b59ac71 to your account address. -```bash -$ aptos move publish --package-dir aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=8946741e5c907c43c9e042b3739993f32904723f8e2d1491564d38959b59ac71 -``` - -:::tip -As an open source project, the source code as well as compiled code published to the Aptos blockchain is inherently open by default. This means code you upload may be downloaded from on-chain data. Even without source access, it is possible to regenerate Move source from Move bytecode. To disable source access, publish with the `--included-artifacts none` argument, like so: - -``` -aptos move publish --included-artifacts none -``` -::: - -You can additionally use named profiles for the addresses. 
The first placeholder is `default` -```bash -$ aptos move publish --package-dir aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=default -``` - -:::tip -When publishing Move modules, if multiple modules are in one package, then all the modules in this package must have the same account. If they have different accounts, then the publishing will fail at the transaction level. -::: - -## Running a Move function - -Now that you've published the function above, you can run it. - -Arguments must be given a type with a colon to separate it. In this example, we want the input to be -parsed as a string, so we put `string:Hello!`. - -```bash -$ aptos move run --function-id 0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb::message::set_message --args string:hello! -{ - "Result": { - "changes": [ - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "authentication_key": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "self_address": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "sequence_number": "3" - }, - "event": "write_resource", - "resource": "0x1::account::Account" - }, - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "coin": { - "value": "9777" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "2" - } - } - } - }, - "event": "write_resource", - "resource": "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>" - }, - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "counter": "4" - }, - "event": "write_resource", - "resource": "0x1::guid::Generator" 
- }, - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "message": "hello!", - "message_change_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "3" - } - } - } - }, - "event": "write_resource", - "resource": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb::Message::MessageHolder" - } - ], - "gas_used": 41, - "success": true, - "version": 3488, - "vm_status": "Executed successfully" - } -} -``` - -Additionally, profiles can replace addresses in the function id. -```bash -$ aptos move run --function-id default::message::set_message --args string:hello! -{ - "Result": { - "changes": [ - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "authentication_key": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "self_address": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "sequence_number": "3" - }, - "event": "write_resource", - "resource": "0x1::account::Account" - }, - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "coin": { - "value": "9777" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "2" - } - } - } - }, - "event": "write_resource", - "resource": "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>" - }, - { - "address": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "counter": "4" - }, - "event": "write_resource", - "resource": "0x1::guid::Generator" - }, - { - "address": 
"b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "data": { - "message": "hello!", - "message_change_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "3" - } - } - } - }, - "event": "write_resource", - "resource": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb::Message::MessageHolder" - } - ], - "gas_used": 41, - "success": true, - "version": 3488, - "vm_status": "Executed successfully" - } -} -``` - -## Arguments in JSON - -### Package info - -This section references the [`CliArgs` example package](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/cli_args), which contains the following manifest: - -import move_toml from '!!raw-loader!../../../../aptos-move/move-examples/cli_args/Move.toml'; - -{move_toml} - -Here, the package is deployed under the named address `test_account`. - -:::tip -Set your working directory to [`aptos-move/move-examples/cli_args`](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/cli_args) to follow along: - -```bash -cd /aptos-core/aptos-move/move-examples/cli_args -``` -::: - -### Deploying the package - -Start by mining a vanity address for Ace, who will deploy the package: - - -```bash title=Command -aptos key generate \ - --vanity-prefix 0xace \ - --output-file ace.key -``` - -
Output - -```bash -{ - "Result": { - "Account Address:": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "PublicKey Path": "ace.key.pub", - "PrivateKey Path": "ace.key" - } -} -``` - -
- -:::tip -The exact account address should vary for each run, though the vanity prefix should not. -::: - -Store Ace's address in a shell variable so you can call it inline later on: - -```bash -# Your exact address should vary -ace_addr=0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46 -``` - -Fund Ace's account with the faucet (either devnet or testnet): - -```bash title=Command -aptos account fund-with-faucet --account $ace_addr -``` - -
Output - -```bash -{ - "Result": "Added 100000000 Octas to account acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46" -} -``` - -
- -Now publish the package under Ace's account: - -```bash title=Command -aptos move publish \ - --named-addresses test_account=$ace_addr \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x1d7b074dd95724c5459a1c30fe4cb3875e7b0478cc90c87c8e3f21381625bec1", - "gas_used": 1294, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 0, - "success": true, - "timestamp_us": 1685077849297587, - "version": 528422121, - "vm_status": "Executed successfully" - } -} -``` - -
- -### Entry functions - -The only module in the package, `cli_args.move`, defines a simple `Holder` resource with fields of various data types: - -```rust title="Holder in cli_args.move" -:!: static/move-examples/cli_args/sources/cli_args.move resource -``` - -A public entry function with multi-nested vectors can be used to set the fields: - -```rust title="Setter function in cli_args.move" -:!: static/move-examples/cli_args/sources/cli_args.move setter -``` - -After the package has been published, `aptos move run` can be used to call `set_vals()`: - -:::tip -To pass vectors (including nested vectors) as arguments from the command line, use JSON syntax escaped with quotes! -::: - -```bash title="Running function with nested vector arguments from CLI" -aptos move run \ - --function-id $ace_addr::cli_args::set_vals \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args \ - u8:123 \ - "hex:0x1234" \ - "string:hello, world\! ♥" \ - "bool:[false, true, false, false]" \ - 'address:[["0xace", "0xbee"], ["0xcad"], []]' \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x5e141dc6c28e86fa9f5594de93d07a014264ebadfb99be6db922a929eb1da24f", - "gas_used": 504, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 1, - "success": true, - "timestamp_us": 1685077888820037, - "version": 528422422, - "vm_status": "Executed successfully" - } -} -``` - -
- -The function ID, type arguments, and arguments can alternatively be specified in a JSON file: - -import entry_json_file from '!!raw-loader!../../../../aptos-move/move-examples/cli_args/entry_function_arguments.json'; - -{entry_json_file} - -Here, the call to `aptos move run` looks like: - -```bash title="Running function with JSON input file" -aptos move run \ - --json-file entry_function_arguments.json \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x60a32315bb48bf6d31629332f6b1a3471dd0cb016fdee8d0bb7dcd0be9833e60", - "gas_used": 3, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 2, - "success": true, - "timestamp_us": 1685077961499641, - "version": 528422965, - "vm_status": "Executed successfully" - } -} -``` - -
-
-:::tip
-If you are trying to run the example yourself don't forget to substitute Ace's actual address for `<test_account>` in `entry_function_arguments.json`!
-:::
-
-### View functions
-
-Once the values in a `Holder` have been set, the `reveal()` view function can be used to check the first three fields, and to compare type arguments against the last two fields:
-
-```rust title="View function"
-:!: static/move-examples/cli_args/sources/cli_args.move view
-```
-
-This view function can be called with arguments specified either from the CLI or from a JSON file:
-
-```bash title="Arguments via CLI"
-aptos move view \
-    --function-id $ace_addr::cli_args::reveal \
-    --type-args \
-        0x1::account::Account \
-        0x1::account::Account \
-    --args address:$ace_addr
-```
-
-```bash title="Arguments via JSON file"
-aptos move view --json-file view_function_arguments.json
-```
-
-:::tip
-If you are trying to run the example yourself don't forget to substitute Ace's actual address for `<test_account>` in `view_function_arguments.json` (twice)!
-:::
-
-import view_json_file from '!!raw-loader!../../../../aptos-move/move-examples/cli_args/view_function_arguments.json';
-
-{view_json_file}
-
-```bash title="Output"
-{
-  "Result": [
-    {
-      "address_vec_vec": [
-        [
-          "0xace",
-          "0xbee"
-        ],
-        [
-          "0xcad"
-        ],
-        []
-      ],
-      "bool_vec": [
-        false,
-        true,
-        false,
-        false
-      ],
-      "bytes": "0x1234",
-      "type_info_1_match": true,
-      "type_info_2_match": false,
-      "u8_solo": 123,
-      "utf8_string": "hello, world! ♥"
-    }
-  ]
-}
-```
-
-### Script functions
-
-The package also contains a script, `set_vals.move`, which is a wrapper for the setter function:
-
-```rust title="script"
-:!: static/move-examples/cli_args/scripts/set_vals.move script
-```
-
-First compile the package (this will compile the script):
-
-```bash title=Compilation
-aptos move compile --named-addresses test_account=$ace_addr
-```
-
Output - -```bash -{ - "Result": [ - "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46::cli_args" - ] -} -``` - -
- -Next, run `aptos move run-script`: - -```bash title="Arguments via CLI" -aptos move run-script \ - --compiled-script-path build/CliArgs/bytecode_scripts/set_vals.mv \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args \ - u8:123 \ - "hex:0x1234" \ - "string:hello, world\! ♥" \ - "u8:[122, 123, 124, 125]" \ - address:"0xace" \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x1d644eba8187843cc43919469112339bc2c435a49a733ac813b7bc6c79770152", - "gas_used": 3, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 3, - "success": true, - "timestamp_us": 1685078415935612, - "version": 528426413, - "vm_status": "Executed successfully" - } -} -``` - -
- -```bash title="Arguments via JSON file" -aptos move run-script \ - --compiled-script-path build/CliArgs/bytecode_scripts/set_vals.mv \ - --json-file script_function_arguments.json \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x840e2d6a5ab80d5a570effb3665f775f1755e0fd8d76e52bfa7241aaade883d7", - "gas_used": 3, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 4, - "success": true, - "timestamp_us": 1685078516832128, - "version": 528427132, - "vm_status": "Executed successfully" - } -} -``` - -
- -import script_json_file from '!!raw-loader!../../../../aptos-move/move-examples/cli_args/script_function_arguments.json'; - -{script_json_file} - -Both such script function invocations result in the following `reveal()` view function output: - -```bash title="View function call" -aptos move view \ - --function-id $ace_addr::cli_args::reveal \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args address:$ace_addr -``` - -```json title="View function output" -{ - "Result": [ - { - "address_vec_vec": [ - [ - "0xace" - ] - ], - "bool_vec": [ - false, - false, - true, - true - ], - "bytes": "0x1234", - "type_info_1_match": true, - "type_info_2_match": true, - "u8_solo": 123, - "utf8_string": "hello, world! ♥" - } - ] -} -``` - -:::note -As of the time of this writing, the `aptos` CLI only supports script function arguments for vectors of type `u8`, and only up to a vector depth of 1. Hence `vector
<u16>` and `vector<vector<u8>>` are invalid script function argument types.
-:::
-
-
-## Multisig governance
-
-### Background
-
-This section builds upon the [Arguments in JSON](#arguments-in-json) section, and likewise references the [`CliArgs` example package](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/cli_args).
-
-:::tip
-If you would like to follow along, start by completing the [Arguments in JSON](#arguments-in-json) tutorial steps!
-:::
-
-For this example, Ace and Bee will conduct governance operations from a 2-of-2 "multisig v2" account (an on-chain multisig account per [`multisig_account.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/multisig_account.move)).
-
-### Account creation
-
-Since Ace's account was created during the [Arguments in JSON](#arguments-in-json) tutorial, start by mining a vanity address account for Bee too:
-
-```bash title=Command
-aptos key generate \
-    --vanity-prefix 0xbee \
-    --output-file bee.key
-```
-
Output - -```bash -{ - "Result": { - "PublicKey Path": "bee.key.pub", - "PrivateKey Path": "bee.key", - "Account Address:": "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc" - } -} -``` - -
- -:::tip -The exact account address should vary for each run, though the vanity prefix should not. -::: - -Store Bee's address in a shell variable so you can call it inline later on: - -```bash -# Your exact address should vary -bee_addr=0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc -``` - -Fund Bee's account using the faucet: - -```bash title=Command -aptos account fund-with-faucet --account $bee_addr -``` - -
Output - -```bash -{ - "Result": "Added 100000000 Octas to account beec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc" -} -``` - -
- -Ace can now create a multisig account: - -```bash title=Command -aptos multisig create \ - --additional-owners $bee_addr \ - --num-signatures-required 2 \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "multisig_address": "57478da34604655c68b1dcb89e4f4a9124b6c0ecc1c59a0931d58cc4e60ac5c5", - "transaction_hash": "0x849cc756de2d3b57210f5d32ae4b5e7d1f80e5d376233885944b6f3cc2124a05", - "gas_used": 1524, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 5, - "success": true, - "timestamp_us": 1685078644186194, - "version": 528428043, - "vm_status": "Executed successfully" - } -} -``` - -
- -Store the multisig address in a shell variable: - -```bash -# Your address should vary -multisig_addr=0x57478da34604655c68b1dcb89e4f4a9124b6c0ecc1c59a0931d58cc4e60ac5c5 -``` - -### Inspect the multisig - -Use the assorted [`multisig_account.move` view functions](https://github.com/aptos-labs/aptos-core/blob/9fa0102c3e474d99ea35a0a85c6893604be41611/aptos-move/framework/aptos-framework/sources/multisig_account.move#L237) to inspect the multisig: - -```bash title="Number of signatures required" -aptos move view \ - --function-id 0x1::multisig_account::num_signatures_required \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "2" - ] -} -``` - -
- -```bash title="Owners" -aptos move view \ - --function-id 0x1::multisig_account::owners \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - [ - "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46" - ] - ] -} -``` - -
- -```bash title="Last resolved sequence number" -aptos move view \ - --function-id 0x1::multisig_account::last_resolved_sequence_number \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "0" - ] -} -``` - -
- -```bash title="Next sequence number" -aptos move view \ - --function-id 0x1::multisig_account::next_sequence_number \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "1" - ] -} -``` - -
- -### Enqueue a publication transaction - -The first multisig transaction enqueued will be a transaction for publication of the [`CliArgs` example package](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/cli_args). -First, generate a publication payload entry function JSON file: - -```bash title="Command" -aptos move build-publish-payload \ - --named-addresses test_account=$multisig_addr \ - --json-output-file publication.json \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": "Publication payload entry function JSON file saved to publication.json" -} -``` - -
- -Now have Ace propose publication of the package from the multisig account, storing only the payload hash on-chain: - -```bash title="Command" -aptos multisig create-transaction \ - --multisig-address $multisig_addr \ - --json-file publication.json \ - --store-hash-only \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x70c75903f8e1b1c0069f1e84ef9583ad8000f24124b33a746c88d2b031f7fe2c", - "gas_used": 510, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 6, - "success": true, - "timestamp_us": 1685078836492390, - "version": 528429447, - "vm_status": "Executed successfully" - } -} -``` - -
- -Note that the last resolved sequence number is still 0 because no transactions have been resolved: - -```bash title="Last resolved sequence number" -aptos move view \ - --function-id 0x1::multisig_account::last_resolved_sequence_number \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "0" - ] -} -``` - -
- -However the next sequence number has been incremented because a transaction has been enqueued: - -```bash title="Next sequence number" -aptos move view \ - --function-id 0x1::multisig_account::next_sequence_number \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "2" - ] -} -``` - -
- -The multisig transaction enqueued on-chain can now be inspected: - -```bash title="Get transaction" -aptos move view \ - --function-id 0x1::multisig_account::get_transaction \ - --args \ - address:"$multisig_addr" \ - String:1 -``` - -
Output - -```bash -{ - "Result": [ - { - "creation_time_secs": "1685078836", - "creator": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "payload": { - "vec": [] - }, - "payload_hash": { - "vec": [ - "0x62b91159c1428c1ef488c7290771de458464bd665691d9653d195bc28e0d2080" - ] - }, - "votes": { - "data": [ - { - "key": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "value": true - } - ] - } - } - ] -} -``` - -
- -Note from the above result that no payload is stored on-chain, and that Ace implicitly approved the transaction (voted `true`) upon the submission of the proposal. - -### Enqueue a governance parameter transaction - -Now have Bee enqueue a governance parameter setter transaction, storing the entire transaction payload on-chain: - -```bash title="Command" -aptos multisig create-transaction \ - --multisig-address $multisig_addr \ - --function-id $multisig_addr::cli_args::set_vals \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args \ - u8:123 \ - "bool:[false, true, false, false]" \ - 'address:[["0xace", "0xbee"], ["0xcad"], []]' \ - --private-key-file bee.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0xd0a348072d5bfc5a2e5d444f92f0ecc10b978dad720b174303bc6d91342f27ec", - "gas_used": 511, - "gas_unit_price": 100, - "sender": "beec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "sequence_number": 0, - "success": true, - "timestamp_us": 1685078954841650, - "version": 528430315, - "vm_status": "Executed successfully" - } -} -``` - -
- -Note the next sequence number has been incremented again: - -```bash title="Next sequence number" -aptos move view \ - --function-id 0x1::multisig_account::next_sequence_number \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - "3" - ] -} -``` - -
- -Now both the publication and parameter transactions are pending: - -```bash title="Get pending transactions" -aptos move view \ - --function-id 0x1::multisig_account::get_pending_transactions \ - --args \ - address:"$multisig_addr" -``` - -
Output - -```bash -{ - "Result": [ - [ - { - "creation_time_secs": "1685078836", - "creator": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "payload": { - "vec": [] - }, - "payload_hash": { - "vec": [ - "0x62b91159c1428c1ef488c7290771de458464bd665691d9653d195bc28e0d2080" - ] - }, - "votes": { - "data": [ - { - "key": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "value": true - } - ] - } - }, - { - "creation_time_secs": "1685078954", - "creator": "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "payload": { - "vec": [ - "0x0057478da34604655c68b1dcb89e4f4a9124b6c0ecc1c59a0931d58cc4e60ac5c508636c695f61726773087365745f76616c7302070000000000000000000000000000000000000000000000000000000000000001076163636f756e74074163636f756e740007000000000000000000000000000000000000000000000000000000000000000108636861696e5f696407436861696e49640003017b0504000100006403020000000000000000000000000000000000000000000000000000000000000ace0000000000000000000000000000000000000000000000000000000000000bee010000000000000000000000000000000000000000000000000000000000000cad00" - ] - }, - "payload_hash": { - "vec": [] - }, - "votes": { - "data": [ - { - "key": "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "value": true - } - ] - } - } - ] - ] -} -``` - -
- -### Execute the publication transaction - -Since only Ace has voted on the publication transaction (which he implicitly approved upon proposing) the transaction can't be executed yet: - -```bash title="Can be executed" -aptos move view \ - --function-id 0x1::multisig_account::can_be_executed \ - --args \ - address:"$multisig_addr" \ - String:1 -``` - -
Output - -```bash -{ - "Result": [ - false - ] -} -``` - -
- -Before Bee votes, however, she verifies that the payload hash stored on-chain matches the publication entry function JSON file: - -```bash title="Verifying transaction proposal" -aptos multisig verify-proposal \ - --multisig-address $multisig_addr \ - --json-file publication.json \ - --sequence-number 1 -``` - -
Output - -```bash -{ - "Result": { - "Status": "Transaction match", - "Multisig transaction": { - "creation_time_secs": "1685078836", - "creator": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "payload": { - "vec": [] - }, - "payload_hash": { - "vec": [ - "0x62b91159c1428c1ef488c7290771de458464bd665691d9653d195bc28e0d2080" - ] - }, - "votes": { - "data": [ - { - "key": "0xacef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "value": true - } - ] - } - } - } -} -``` - -
- -Since Bee has verified that the on-chain payload hash checks out against her locally-compiled package publication JSON file, she votes yes: - - -```bash title="Approving transaction" -aptos multisig approve \ - --multisig-address $multisig_addr \ - --sequence-number 1 \ - --private-key-file bee.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0xa5fb49f1077de6aa6d976e6bcc05e4c50c6cd061f1c87e8f1ea74e7a04a06bd1", - "gas_used": 6, - "gas_unit_price": 100, - "sender": "beec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "sequence_number": 1, - "success": true, - "timestamp_us": 1685079892130861, - "version": 528437204, - "vm_status": "Executed successfully" - } -} -``` - -
- -Now the transaction can be executed: - -```bash title="Can be executed" -aptos move view \ - --function-id 0x1::multisig_account::can_be_executed \ - --args \ - address:"$multisig_addr" \ - String:1 -``` - -
Output - -```bash -{ - "Result": [ - true - ] -} -``` - -
- -Now either Ace or Bee can invoke the publication transaction from the multisig account, passing the full transaction payload since only the hash was stored on-chain: - -```bash title="Publication" -aptos multisig execute-with-payload \ - --multisig-address $multisig_addr \ - --json-file publication.json \ - --private-key-file bee.key \ - --max-gas 10000 \ - --assume-yes -``` - -:::tip -Pending the resolution of [#8304](https://github.com/aptos-labs/aptos-core/issues/8304), the transaction simulator (which is used to estimate gas costs) is broken for multisig transactions, so you will have to manually specify a max gas amount. -::: - -
Output - -Also pending the resolution of [#8304](https://github.com/aptos-labs/aptos-core/issues/8304), the CLI output for a successful multisig publication transaction execution results in an API error if only the payload hash has been stored on-chain, but the transaction can be manually verified using an explorer. - -
- -### Execute the governance parameter transaction - -Since only Bee has voted on the governance parameter transaction (which she implicitly approved upon proposing), the transaction can't be executed yet: - -```bash title="Can be executed" -aptos move view \ - --function-id 0x1::multisig_account::can_be_executed \ - --args \ - address:"$multisig_addr" \ - String:2 -``` - -
Output - -```bash -{ - "Result": [ - false - ] -} -``` - -
- -Before Ace votes, however, he verifies that the payload stored on-chain matches the function arguments he expects: - -```bash title="Verifying transaction proposal" -aptos multisig verify-proposal \ - --multisig-address $multisig_addr \ - --function-id $multisig_addr::cli_args::set_vals \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args \ - u8:123 \ - "bool:[false, true, false, false]" \ - 'address:[["0xace", "0xbee"], ["0xcad"], []]' \ - --sequence-number 2 -``` - -
Output - -```bash -{ - "Result": { - "Status": "Transaction match", - "Multisig transaction": { - "creation_time_secs": "1685078954", - "creator": "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "payload": { - "vec": [ - "0x0057478da34604655c68b1dcb89e4f4a9124b6c0ecc1c59a0931d58cc4e60ac5c508636c695f61726773087365745f76616c7302070000000000000000000000000000000000000000000000000000000000000001076163636f756e74074163636f756e740007000000000000000000000000000000000000000000000000000000000000000108636861696e5f696407436861696e49640003017b0504000100006403020000000000000000000000000000000000000000000000000000000000000ace0000000000000000000000000000000000000000000000000000000000000bee010000000000000000000000000000000000000000000000000000000000000cad00" - ] - }, - "payload_hash": { - "vec": [] - }, - "votes": { - "data": [ - { - "key": "0xbeec980219d246581cef5166dc6ba5fb1e090c7a7786a5176d111a9029b16ddc", - "value": true - } - ] - } - } - } -} -``` - -
- -Note that the verification fails if he modifies even a single argument: - -```bash title="Failed transaction verification with modified u8" -aptos multisig verify-proposal \ - --multisig-address $multisig_addr \ - --function-id $multisig_addr::cli_args::set_vals \ - --type-args \ - 0x1::account::Account \ - 0x1::chain_id::ChainId \ - --args \ - u8:200 \ - "bool:[false, true, false, false]" \ - 'address:[["0xace", "0xbee"], ["0xcad"], []]' \ - --sequence-number 2 -``` - -
Output - -```bash -{ - "Error": "Unexpected error: Transaction mismatch: The transaction you provided has a payload hash of 0xe494b0072d6f940317344967cf0e818c80082375833708c773b0275f3ad07e51, but the on-chain transaction proposal you specified has a payload hash of 0x070ed7c3f812f25f585461305d507b96a4e756f784e01c8c59901871267a1580. For more info, see https://aptos.dev/move/move-on-aptos/cli#multisig-governance" -} -``` - -
- -Ace approves the transaction: - -```bash title="Approving transaction" -aptos multisig approve \ - --multisig-address $multisig_addr \ - --sequence-number 2 \ - --private-key-file ace.key \ - --assume-yes -``` - -
Output - -```bash -{ - "Result": { - "transaction_hash": "0x233427d95832234fa13dddad5e0b225d40168b4c2c6b84f5255eecc3e68401bf", - "gas_used": 6, - "gas_unit_price": 100, - "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46", - "sequence_number": 7, - "success": true, - "timestamp_us": 1685080266378400, - "version": 528439883, - "vm_status": "Executed successfully" - } -} -``` - -
- -Since the payload was stored on-chain, it is not required to execute the pending transaction: - -```bash title="Execution" -aptos multisig execute \ - --multisig-address $multisig_addr \ - --private-key-file ace.key \ - --max-gas 10000 \ - --assume-yes -``` - -
Output

-
-```bash
-{
-  "Result": {
-    "transaction_hash": "0xbc99f929708a1058b223aa880d04607a78ebe503367ec4dab23af4a3bdb541b2",
-    "gas_used": 505,
-    "gas_unit_price": 100,
-    "sender": "acef1b9b7d4ab208b99fed60746d18dcd74865edb7eb3c3f1428233988e4ba46",
-    "sequence_number": 8,
-    "success": true,
-    "timestamp_us": 1685080344045461,
-    "version": 528440423,
-    "vm_status": "Executed successfully"
-  }
-}
-```
-
diff --git a/developer-docs-site/docs/move/move-on-aptos/cryptography.md b/developer-docs-site/docs/move/move-on-aptos/cryptography.md deleted file mode 100644 index f255286648515..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/cryptography.md +++ /dev/null @@ -1,245 +0,0 @@ ---- -title: "Cryptography" ---- - -# Cryptography in Move - -Cryptography plays an integral role in ensuring the security, integrity, confidentiality, and immutability of data in blockchain systems. The Aptos adapter for Move provides developers with an array of cryptographic primitives to cater to this need. This document delves into the cryptographic functionalities offered by Move on Aptos and elucidates the principles that drive their design. - -## Cryptographic primitives - -Move, through the Aptos adapter, encompasses several fundamental cryptographic tools: - -1. [Cryptographic Hash Functions](#cryptographic-hash-functions) – Algorithms that produce a fixed-size output (hash) from variable-sized input data. Supported functions include SHA2-256, SHA3-256, Keccak256, and Blake2b-256. -2. [Digital Signature Verification](#digital-signature-verification) – Algorithms for signing a message so as to ensure its integrity, authenticate its sender, ensure non-repudiation, or any combination thereof. Supported signature schemes include Ed25519, ECDSA, and BLS. -3. [Elliptic Curve Arithmetic](#elliptic-curve-arithmetic) – Elliptic curves are one of the building blocks of advanced cryptographic primitives, such as digital signatures, public-key encryption or verifiable secret sharing. Supported curves include Ristretto255 and BLS12-381. -4. [Zero-Knowledge Proofs (ZKP)](#building-powerful-cryptographic-applications) – These cryptographic techniques enable a party to prove that a relation $R(x; w)$ is satisfied on a public statement $x$ without leaking the secret witness $w$ that makes it hold. 
Currently, we support Groth16 ZKP verification and Bulletproofs ZK range proof verification. - -Three fundamental principles guide the design and integration of the Aptos cryptographic extensions into Move: - -1. **Economic Gas Usage** – Striving to minimize gas costs for Move developers by implementing key primitives as [Move native functions](../book/functions.md#native-functions). For example, see the module for [BLS signatures over BLS12-381 elliptic curves](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/bls12381.move). -2. **Type-Safe APIs** – Ensuring that APIs are resistant to common mistakes, type-safety enhances code reliability and promotes an efficient development process. For an example, see the [Ed25519 signature module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ed25519.move). -3. **Empowerment of Developers** – In instances where native functions are unavailable, we empower developers to build their own cryptographic primitives on top of abstract cryptographic building blocks such as _finite fields_ and _Abelian groups_. Refer to the [`aptos_std::crypto_algebra`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/crypto_algebra.move) module for more insights. - -Continue reading to delve a bit deeper and uncover some of the intricacies behind these extensions, as well as the range of applications they empower. For the most comprehensive understanding of this subject, refer to the [cryptography Move modules code](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/aptos-stdlib/sources/cryptography). 
- -## Cryptographic hash functions - -Developers can now use more cryptographic hash functions in Move via the [`aptos_std::aptos_hash`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/hash.move) module: - -| Hash function | Hash size (bits) | Cost for hashing 1KiB (in internal gas units) | Collision-resistance security (bits) | -|---------------|------------------|-----------------------------------------------|--------------------------------------| -| Keccak256 | 256 | 1,001,600 | 128 | -| SHA2-256 | 256 | 1,084,000 | 128 | -| SHA2-512 | 512 | 1,293,600 | 256 | -| SHA3-256 | 256 | 1,001,600 | 128 | -| SHA3-512 | 512 | 1,114,000 | 256 | -| RIPEMD160 | 160 | 1,084,000 | 80 (**weak**) | -| Blake2b-256 | 256 | 342,200 | 128 | - -All hash functions have the same security properties (e.g., one-wayness, collision resistance, etc.), but their security levels are different. - -:::caution -RIPEMD160 should be avoided as a collision-resistant function due to its 80-bit security level. It is mainly supported for backward-compatibility reasons: e.g., Bitcoin address derivation relies on RIPEMD160. -::: - -Some of these functions can be used for interoperability with other chains (e.g., verifying Ethereum Merkle proofs via [`aptos_std::aptos_hash::keccak256`](https://github.com/aptos-labs/aptos-core/blob/137acee4c6dddb1c86398dce25b041d78a3028d3/aptos-move/framework/aptos-stdlib/sources/hash.move#L35)). -Others, have lower gas costs, such as [`aptos_std::aptos_hash::blake2b_256`](https://github.com/aptos-labs/aptos-core/blob/137acee4c6dddb1c86398dce25b041d78a3028d3/aptos-move/framework/aptos-stdlib/sources/hash.move#L69). -In general, a wider variety of hash functions give developers additional freedom in terms of both security and interoperability with other off-chain cryptographic systems. 
- -## Digital signature verification - -Developers can now use a *type-safe* API for verifying many kinds of digital signatures in Move: - -| Signature scheme | Curve | Sig. size (bytes) | PK size (bytes) | Malleability | Assumptions | Pros | Cons | -|-----------------------------------------------------------------------------------------------------------------------------------------------------------|---------------|-------------------|-----------------|--------------|-------------|---------------|---------------------| -| [ECDSA](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/secp256k1.move) | secp256k1 | 64 | 64 | Yes | GGM | Wide adoption | Security proof | -| [Ed25519](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ed25519.move) | Edwards 25519 | 64 | 32 | No | DLA, ROM | Fast | Subtleties | -| [MultiEd25519](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/multi_ed25519.move) | Edwards 25519 | $4 + t \cdot 64$ | $n \cdot 32$ | No | DLA, ROM | Easy-to-adopt | Large sig. 
size | -| [MinPK BLS](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/bls12381.move) | BLS12-381 | 96 | 48 | No | CDH, ROM | Versatile | Slower verification | -| [MinSig BLS](https://github.com/aptos-labs/aptos-core/blob/7d4fb98c6604c67e526a96f55668e7add7aaebf6/aptos-move/move-examples/drand/sources/drand.move#L57) | BLS12-381 | 48 | 96 | No | CDH, ROM | Versatile | Slower verification | - - -:::note - - CDH stands for the _"Computational Diffie-Hellman Assumption"_ - - DLA stands for the _"Discrete Log Assumption"_ - - GGM stands for the _"Generic Group Model"_ - - ROM stands for the _"Random Oracle Model"_ -::: - -The digital signature modules above can be used to build smart contract-based wallets, secure claiming mechanisms for airdrops, or any digital-signature-based access-control mechanism for dapps. - -The right choice of a signature scheme in your dapp could depend on many factors: -1. **Backwards-compatibility** - - If your dapp's user base predominantly uses a particular signing mechanism, it would be prudent to support that mechanism for ease of transition and adoption. - - Example: If users mainly sign using Ed25519, it becomes a logical choice. -2. **Ease-of-implementation** - - While theoretically sound, complex protocols may be challenging to implement in practice. - - Example: Even though $t$-out-of-$n$ threshold protocols for Ed25519 exist, their intricacy on the signer's side might push developers toward MultiEd25519 due to its more straightforward signing implementation. -3. **Efficiency** - - Depending on the dapp's requirements, you might prioritize one aspect of efficiency over another. - - Signature size vs. public key size: Some applications might prioritize a smaller signature footprint, while others might emphasize a compact PK. - - Signing time vs. 
verification time: For certain dapps, the signing speed might be more crucial, while for others, rapid signature verification could be the priority. -4. **Security analysis** - - It is essential to consider the underlying assumptions and potential vulnerabilities of a signature scheme. - - Example: ECDSA's security is proven under strong assumptions such as the Generic Group Model (GGM). - - Malleability concerns: Some signature schemes are susceptible to malleability, where a valid signature, $\sigma$, can be mauled into a different yet still valid signature, $\sigma'$, for the same message $m$. -5. **Versatility** - - The adaptability and flexibility of signature schemes are important to consider so you may properly accommodate the cryptographic needs of your dapp. - - Example: $t$-out-of-$n$ threshold BLS signatures are very simple to implement. - -:::caution -Despite its careful, principled design[^ed25519], Ed25519 has known implementation subtleties. For example, different implementations could easily disagree on the validity of signatures, especially when batch verification is employed[^devalence]$^,$[^eddsa]. -::: - -:::tip -Our [`aptos_std::bls12381`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/bls12381.move) module for [MinPK BLS](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-bls-signature-05#name-variants) supports verification of individual signatures, **multi**-signatures, **aggregate** signatures and **threshold** signatures. -::: - -## Elliptic curve arithmetic - -While the [hash function](#cryptographic-hash-functions) and [digital signature](#digital-signature-verification) modules should provide enough functionality for most applications, some applications will require more powerful cryptography. 
-Normally, developers of such applications would have to wait until their desired cryptographic functionality is implemented efficiently as a [Move native function](../book/functions.md#native-functions) in the [Aptos Move framework](/reference/move). -Instead, we expose basic building blocks that developers can use to implement their own cryptographic primitives directly in the Move language and do so **efficiently**. - -Specifically, we currently expose low-level arithmetic operations on two popular elliptic curve groups and their associated finite fields: - - 1. Ristretto255, via [`aptos_std::ristretto255`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ristretto255.move) - 2. BLS12-381, via [`aptos_std::crypto_algebra`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/crypto_algebra.move) - and [`aptos_std::bls12381_algebra`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/bls12381_algebra.move) - -These modules support low-level operations such as: - - * scalar multiplication of elliptic curve points - * multi-scalar multiplications (MSMs) - * pairings - * scalar addition, multiplication, inversion - * hashing to a scalar or to a point - * and many more - -Examples of powerful applications that can be built on top include: - - 1. **Validity rollups** – See the [`groth16` zkSNARK verifier example](#groth16-zksnark-verifier). - 2. **Randomness-based games** – See the [`drand` verifier example](#verifying-randomness-from-the-drand-beacon). - 3. **Privacy-preserving applications** – See the [`veiled_coin` example](#veiled-coins). 
- -### Ristretto255 arithmetic - -The [`aptos_std::ristretto255`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ristretto255.move) module provides support for elliptic curve arithmetic on the popular [Ristretto255 curve](https://ristretto.group/). -One of the main advantages of Ristretto255 is that it is a prime order group (unlike the Edwards 25519 curve), which obviates small-subgroup attacks on higher-level cryptosystems built on top of it. -Furthermore, Ristretto255 serialization is canonical and deserialization only accepts canonical encodings, which obviates malleability issues in higher-level protocols. - -This module has proven useful for implementing several cryptographic primitives: - - 1. **Zero-knowledge $\Sigma$-protocols** – See the [`veiled_coin` example](#veiled-coins). - 2. **ElGamal** encryption – See [`aptos_std::ristretto255_elgamal`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ristretto255_elgamal.move) - 3. **Pedersen** commitments – See [`aptos_std::ristretto255_pedersen`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ristretto255_pedersen.move) - 4. **Bulletproofs** ZK range proofs[^bulletproofs] – See [`aptos_std::ristretto255_bulletproofs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/ristretto255_bulletproofs.move) - -Need ideas for a cryptosystem to build on top of `ristretto255`? -A popular primitive that you could easily build would be the [schnorrkel](https://github.com/w3f/schnorrkel) signature scheme, which is a hardended version of Schnorr signatures over Ristretto255 groups. - -### Generic elliptic curve arithmetic - -What is better than one curve? More curves! 
- -The [`aptos_std::crypto_algebra`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/crypto_algebra.move) provides elliptic curve arithmetic operations for **any** supported elliptic curve, including pairing-friendly curves. -As a consequence, Move developers can implement a cryptosystem generically over **any** curve that is or will be supported in the future. -Compared to fixing a particular curve in the code (e.g., by implementing against the [Ristretto255 module](#ristretto255-arithmetic)), this approach provides more flexibility and lowers development time when migrating to a different curve. - -Although currently the `crypto_algebra` module only supports arithmetic over BLS12-381 curves (via the marker types declared in [`aptos_std::bls12381_algebra`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/cryptography/bls12381_algebra.move)), more curves will be supported into the future (e.g., BN254, Ristretto255, BLS12-377, BW6-761, secp256k1, secp256r1). - -As an example, a Move developer can implement the popular Boneh-Lynn-Shacham (BLS) signature scheme generically over **any** curve by using [type arguments](../../../move/book/functions#type-parameters) for the curve type in their implementation: - -```rust title="Generic BLS signature verification over any curve" -use std::option; -use aptos_std::crypto_algebra::{eq, pairing, one, deserialize, hash_to}; - -/// Example of a BLS signature verification function that works over any pairing-friendly -/// group triple `Gr1`, `Gr2`, `GrT` where signatures are in `Gr1` and PKs in `Gr2`. -/// Points are serialized using the format in `FormatG1` and `FormatG2` and the hashing -/// method is `HashMethod`. -/// -/// WARNING: This example is type-unsafe and probably not a great fit for production code. 
-public fun bls_verify_sig( - dst: vector, - signature: vector, - message: vector, - public_key: vector): bool -{ - let sig = option::extract(&mut deserialize(&signature)); - let pk = option::extract(&mut deserialize(&public_key)); - let hash = hash_to(&dst, &message); - - // Checks if $e(H(m), pk) = e(sig, g_2)$, where $g_2$ generates $\mathbb{G}_2$ - eq( - &pairing(&hash, &pk), - &pairing(&sig, &one()) - ) -} -``` - -Using the `bls_verify_sig` _generic_ function from above, developers can verify BLS signatures over **any** of the supported (pairing-friendly) curves. -For example, one can verify [MinSig BLS](https://datatracker.ietf.org/doc/html/draft-irtf-cfrg-bls-signature-05#name-variants) signatures over BLS12-381 curves by calling the function above with the right BLS12-381 marker types as its type arguments: - -```rust title="MinSig BLS signature verification over BLS12-381" -use aptos_std::bls12381_algebra::{ - G1, G2, Gt, FormatG1Compr, FormatG2Compr, HashG1XmdSha256SswuRo -}; - -// Aborts with code 1 if the MinSig BLS signature over the BLS12-381 curve fails to verify. -assert( - bls_verify_sig( - dst, signature, message, public_key - ), - 1 -); -``` - -For more use cases of the `crypto_algebra` module, check out some Move examples: - -1. [Verifying Groth16 zkSNARK proofs](#groth16-zksnark-verifier) over **any** curve -2. [Verifying randomness from the `drand` beacon](#verifying-randomness-from-the-drand-beacon) - -## Building powerful cryptographic applications - -### Veiled coins - -The [`veiled_coin` example](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/veiled_coin/sources) demonstrates how to use [the Ristretto255 modules from above](#ristretto255-arithmetic) to add a reasonable layer of confidentiality to coin balances and transactions. - -Specifically, users can **veil** their balance, keeping it hidden from everyone, including validators. 
-Furthermore, a user can send a **veiled transaction** that hides the transaction amount from everybody, including validators. -An important caveat is that veiled transactions do **not** hide the identities of the sender or the recipient. - -:::danger -This module is educational. It is **not** production-ready. Using it could lead to loss of funds. -::: - -### Groth16 zkSNARK verifier - -The [`groth16` example](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/move-examples/groth16_example/sources/groth16.move) demonstrates how to verify Groth16 zkSNARK proofs[^groth16], which are the shortest, fastest-to-verify, general-purpose zero-knowledge proofs. -Importantly, as explained [above](#generic-elliptic-curve-arithmetic), this implementation is *generic* over **any** curve, making it very easy for Move developers to use it with their favorite (supported) curves. - -:::caution -This code has not been audited by a third-party organization. If using it in a production system, proceed at your own risk. -::: - -### Verifying randomness from the `drand` beacon - -The [`drand` example](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/drand/sources) shows how to verify public randomness from the [drand](https://drand.love) randomness beacon. -This randomness can be used in games or any other chance-based smart contract. -We give a simple example of a lottery implemented on top of `drand` randomness in [`lottery.move`](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/drand/sources/lottery.move). - -:::caution -This code has not been audited by a third-party organization. If using it in a production system, proceed at your own risk. -::: - -Another application that can be built on top of `drand` is time-lock encryption[^tlock], which allows users to encrypt information such that it can only be decrypted in a future block. -We do not currently have an implementation but the reader is encouraged to write one! 
- -[^bulletproofs]: _bulletproofs:_ **Bulletproofs: Short Proofs for Confidential Transactions and More**; by B. Bünz and J. Bootle and D. Boneh and A. Poelstra and P. Wuille and G. Maxwell; in 2018 IEEE Symposium on Security and Privacy -[^devalence]: _devalence:_ **It’s 255:19AM. Do you know what your validation criteria are?**, by Henry de Valence, [https://hdevalence.ca/blog/2020-10-04-its-25519am](https://hdevalence.ca/blog/2020-10-04-its-25519am) -[^ed25519]: _ed25519:_ **Ed25519: high-speed high-security signatures**, by Daniel J. Bernstein, Niels Duif, Tanja Lange, Peter Schwabe, Bo-Yin Yang, [https://ed25519.cr.yp.to/](https://ed25519.cr.yp.to/) -[^eddsa]: _eddsa:_ **Taming the Many EdDSAs**, by Konstantinos Chalkias, François Garillot, Valeria Nikolaenko, in SSR 2020, [https://dl.acm.org/doi/abs/10.1007/978-3-030-64357-7_4](https://dl.acm.org/doi/abs/10.1007/978-3-030-64357-7_4) -[^groth16]: _groth16:_ **On the Size of Pairing-Based Non-interactive Arguments**; by Groth, Jens; in EUROCRYPT 2016 -[^tlock]: _tlock:_ **tlock: Practical Timelock Encryption from Threshold BLS**; by Nicolas Gailly and Kelsey Melissaris and Yolan Romailler; [https://eprint.iacr.org/2023/189](https://eprint.iacr.org/2023/189) diff --git a/developer-docs-site/docs/move/move-on-aptos/gas-profiling.md b/developer-docs-site/docs/move/move-on-aptos/gas-profiling.md deleted file mode 100644 index 808ef4923ed39..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/gas-profiling.md +++ /dev/null @@ -1,137 +0,0 @@ ---- -title: "Gas Profiling" ---- - -import CodeBlock from '@theme/CodeBlock'; - -# Gas Profiling -The Aptos Gas Profiler is a powerful tool that can help you understand the gas usage of Aptos transactions. -Once activated, it will simulate transactions using an instrumented VM, and generate a web-based report. [Sample] - -The gas profiler can also double as a debugger since the report also includes a full execution trace. 
- -## Using the Gas Profiler -The gas profiler can be invoked by appending the `--profile-gas` option to Aptos CLI’s `move publish`, `move run` or `move run-script` commands. - -Here is an example using the [hello_blockchain package from move examples](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/hello_blockchain). First, `cd` into the package directory. - -```bash -$ cd aptos-move/move-examples/hello_blockchain -``` - -Then, we can simulate module publishing with the extra option `--profile-gas`. - -Notice that you do need to have your CLI profile set up properly and bind the named addresses correctly. Please refer to [CLI Configuration](../../../tools/aptos-cli/use-cli/cli-configuration#initialize-local-configuration-and-create-an-account) for more details. -```bash -$ aptos move publish --named-addresses hello_blockchain=default --profile-gas -``` - -This will result in some terminal output that looks like this: -```bash title="Output" -Compiling, may take a little while to download git dependencies... -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING Examples -package size 1755 bytes - -Simulating transaction locally with the gas profiler... -{ - "Result": { - "transaction_hash": "0x26cc23d11070e6756c6b2ae0ea7d3fc4c791b59cf821f268ba0f03eebb487543", - "gas_used": 1039, - "gas_unit_price": 100, - "sender": "dbcbe741d003a7369d87ec8717afb5df425977106497052f96f4e236372f7dd5", - "success": true, - "version": 762354147, - "vm_status": "status EXECUTED of type Execution" - } -} -``` -Again, it should be emphasized that even though the live chain-state is being used, this is a simulation so the module has NOT really been published to the target network. 
- -You can then find the generated gas report in the directory `gas-profiling`: -```text title="Directory Layout" -- hello_blockchain - - gas-profiling - - txn-xxxxxxxx-0x1-code-publish_package_txn - - assets - - index.html - - sources - - Move.toml -``` -index.html is the main page of the report and you can view it in your web browser. - -## Understanding the Gas Report -The gas report consists of three parts, enabling you to understand the gas usage through different lenses. - -### Flamegraphs -The first section consists of visualization of the gas usage in the form of two flamegraphs: one for execution & IO, the other for storage. -The reason why we need two graphs is because these are measured in different units: one in gas units, and the other in APT. - -It is possible to interact with various elements in the graph. If you hover your cursor over an item, it will show you the precise cost and percentage. -![gas-profiling-flamegraph-0.png](../../../static/img/docs/gas-profiling-flamegraph-0.png) - -If you click on an item, you can zoom into it and see the child items more clearly. -You can reset the view by clicking the "Reset Zoom" button in the top-left corner. -![gas-profiling-flamegraph-1.png](../../../static/img/docs/gas-profiling-flamegraph-1.png) - -There is also “Search” button in the top-right corner that allows to match certain items and highlight them. -![gas-profiling-flamegraph-2.png](../../../static/img/docs/gas-profiling-flamegraph-2.png) - -### Cost Break-down -The second section is a detailed break-down of all gas costs. Data presented in this section is categorized, aggregated and sorted. -This can be especially helpful if you know what numbers to look at. - -For example, the following tables show the IO costs of all storage operations. -The percentage here is relative to the total cost of the belonging category (Exec + IO in this case). 
- -![gas-profiling-cost-break-down-table.png](../../../static/img/docs/gas-profiling-cost-break-down-table.png) - - -### Full Execution Trace -The final section of the gas report is the full execution trace of the transaction that looks like this: - -``` -execution & IO (gas unit, full trace) 106.45206 100.00% - intrinsic 3.94 3.70% - 0x1::code::publish_package_txn 100.02706 93.96% - move_loc 0.0024 0.00% - move_loc 0.0024 0.00% - call_generic 0.024 0.02% - 0x1::util::from_bytes<0x1::code::PackageMetadata> 0.1094 0.10% - move_loc 0.0024 0.00% - call 0.064 0.06% - 0x1::code::publish_package 99.82126 93.77% - call 0.02 0.02% - 0x1::code::upgrade_policy_arbitrary 0.0076 0.01% - ld_u8 0.0012 0.00% - pack 0.0052 0.00% - ret 0.0012 0.00% - st_loc 0.0024 0.00% - imm_borrow_loc 0.0012 0.00% - imm_borrow_field 0.004 0.00% - imm_borrow_field 0.004 0.00% - read_ref 0.0072 0.01% - imm_borrow_loc 0.0012 0.00% - imm_borrow_field 0.004 0.00% - read_ref 0.0072 0.01% - gt 0.0032 0.00% - br_false 0.0024 0.00% - branch 0.0016 0.00% - @17 -... -``` -The left column lists all Move instructions and operations being executed, with each level of indentation indicating a function call. - -The middle column represents the gas costs associated with the operations. - -There is also a special notation `@number` that represents a jump to a particular location in the byte code. -This is purely informational and to help understand the control flow. - -## Future Plans -We plan to extend the gas profiler with the following features: -- Ability to replay historical transactions that have been committed (on mainnet, testnet etc.). -- Ability to annotate source files. - -Feedbacks and feature requests are welcome! Please kindly submit them by creating GitHub issues [here](https://github.com/aptos-labs/aptos-core/issues). 
diff --git a/developer-docs-site/docs/move/move-on-aptos/modules-on-aptos.md b/developer-docs-site/docs/move/move-on-aptos/modules-on-aptos.md deleted file mode 100644 index 131e787b691a5..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/modules-on-aptos.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: "Modules on Aptos" -slug: "modules-on-aptos" ---- - -# Modules on Aptos - -Aptos allows for permissionless publishing of [modules](../book/modules-and-scripts.md) within a [package](../book/packages.md) as well as [upgrading](../book/package-upgrades.md) those that have appropriate compatibility policy set. - -A module contains several structs and functions, much like Rust. - -During package publishing time, a few constraints are maintained: -* Both Structs and public function signatures are published as immutable. -* Only when a module is being published for the first time, and not during an upgrade, will the VM search for and execute an `init_module(account: &signer)` function. The signer of the account that is publishing the module is passed into the `init_module` function of the contract. **This function must be private and not return any value.** - -:::tip `init_module` is optional -It is only necessary if you want to initialize data when publishing a module for the first time. -::: diff --git a/developer-docs-site/docs/move/move-on-aptos/move-scripts.md b/developer-docs-site/docs/move/move-on-aptos/move-scripts.md deleted file mode 100644 index a988dde570074..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/move-scripts.md +++ /dev/null @@ -1,143 +0,0 @@ ---- -title: "Move Scripts" -slug: "move-scripts" ---- - -# Move Scripts - -This tutorial explains how to write and execute a [Move script](../book/modules-and-scripts.md). You can use Move scripts to execute a series of commands across published Move module interfaces. 
- -## Example use case - -The following example calls functions on the [aptos_coin.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/aptos_coin.move) module to confirm the balance of the destination account is less than `desired_balance`, and if so, tops it up to `desired_balance`. - -```move -script { - use std::signer; - use aptos_framework::aptos_account; - use aptos_framework::aptos_coin; - use aptos_framework::coin; - - fun main(src: &signer, dest: address, desired_balance: u64) { - let src_addr = signer::address_of(src); - - addr::my_module::do_nothing(); - - let balance = coin::balance(src_addr); - if (balance < desired_balance) { - aptos_account::transfer(src, dest, desired_balance - balance); - }; - } -} -``` - -## Execution - -Now that you know what you would like to accomplish, you need to determine: - -- Where do I put these files? -- What do I name them? -- Do I need a `Move.toml`? -- How do I run my script with the CLI? - -Let us run through how to execute a Move script with a step-by-step example using the [Aptos CLI](../../tools/aptos-cli/use-cli/use-aptos-cli.md). - -1. Make a new directory for your work: - ```sh - mkdir testing - cd testing - ``` - -2. Set up the Aptos CLI and [create an account](../../tools/aptos-cli/use-cli/use-aptos-cli#initialize-local-configuration-and-create-an-account): - ```sh - aptos init --network devnet - ``` - - You may reuse an existing private key (which looks like this: `0xbd944102bf5b5dfafa7fe865d8fa719da6a1f0eafa3cd600f93385482d2c37a4`), or it can generate a new one for you, as part of setting up your account. 
Let's say your account looks like the example below: - ```sh - --- - profiles: - default: - private_key: "0xbd944102bf5b5dfafa7fe865d8fa719da6a1f0eafa3cd600f93385482d2c37a4" - public_key: "0x47673ec83bb254cc9a8bfdb31846daacd0c96fe41f81855462f5fc5306312b1b" - account: cb265645385819f3dbe71aac266e319e7f77aed252cacf2930b68102828bf615 - rest_url: "https://fullnode.devnet.aptoslabs.com" - faucet_url: "https://faucet.devnet.aptoslabs.com" - ``` - -3. From this same directory, initialize a new Move project: - ```sh - aptos move init --name run_script - ``` - -4. Create a `my_script.move` file containing the example script above in a `sources/` subdirectory of your `testing/` directory. Also, create a `my_module.move` file as seen in the example below: - ``` - module addr::my_module { - public entry fun do_nothing() { } - } - ``` - - This results in the following file structure: - ``` - testing/ - Move.toml - sources/ - my_script.move - my_module.move - ``` - -5. Compile the script: - ``` - $ aptos move compile --named-addresses addr=cb265645385819f3dbe71aac266e319e7f77aed252cacf2930b68102828bf615 - Compiling, may take a little while to download git dependencies... - INCLUDING DEPENDENCY AptosFramework - INCLUDING DEPENDENCY AptosStdlib - INCLUDING DEPENDENCY MoveStdlib - BUILDING run_script - { - "Result": [ - "cb265645385819f3dbe71aac266e319e7f77aed252cacf2930b68102828bf615::my_module" - ] - } - ``` - - Note how we use the `--named-addresses` argument. This is necessary because in the code we refer to this named address called `addr`. The compiler needs to know what this refers to. Instead of using this CLI argument, you could put something like this in your `Move.toml`: - - ``` - [addresses] - addr = "cb265645385819f3dbe71aac266e319e7f77aed252cacf2930b68102828bf615" - ``` - -6. 
Run the compiled script: - ``` - $ aptos move run-script --compiled-script-path build/my_script/bytecode_scripts/main.mv --args address:b078d693856a65401d492f99ca0d6a29a0c5c0e371bc2521570a86e40d95f823 --args u64:5 - Do you want to submit a transaction for a range of [17000 - 25500] Octas at a gas unit price of 100 Octas? [yes/no] > - yes - { - "Result": { - "transaction_hash": "0xa6ca6275c73f82638b88a830015ab81734a533aebd36cc4647b48ff342434cdf", - "gas_used": 3, - "gas_unit_price": 100, - "sender": "cb265645385819f3dbe71aac266e319e7f77aed252cacf2930b68102828bf615", - "sequence_number": 4, - "success": true, - "timestamp_us": 1683030933803632, - "version": 3347495, - "vm_status": "Executed successfully" - } - } - ``` - -Note that the path of the compiled script is under `build/run_script/`, not `build/my_script/`. This is because it uses the name of the project contained in `Move.toml`, which is `run_script` from when we ran `aptos move init --name run_script`. - -See the [code](https://github.com/banool/move-examples/tree/main/run_script) used for this document. The full example explains how to use a Move script that relies on a user-created Move module as well. - -See also how to do this with the [Rust SDK](https://stackoverflow.com/questions/74452702/how-do-i-execute-a-move-script-on-aptos-using-the-rust-sdk) instead of the Aptos CLI in Stack Overflow. - -## Advanced - -You may execute a script in a more streamlined fashion; instead of running `aptos move compile` and then `aptos move run-script --compiled-script-path` separately, you can just do this: -``` -$ aptos move run-script --script-path sources/my_script.move --args address:b078d693856a65401d492f99ca0d6a29a0c5c0e371bc2521570a86e40d95f823 --args u64:5 -``` -This will conduct both steps with a single CLI command yet has [issues](https://github.com/aptos-labs/aptos-core/issues/5733). For this reason, we recommend using the previous two-step approach for now. 
diff --git a/developer-docs-site/docs/move/move-on-aptos/resource-accounts.md b/developer-docs-site/docs/move/move-on-aptos/resource-accounts.md deleted file mode 100644 index 560958e429aab..0000000000000 --- a/developer-docs-site/docs/move/move-on-aptos/resource-accounts.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -title: "Resource Accounts" -slug: "resource-accounts" ---- - -# Resource Accounts - -A [resource account](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/resource_account.move) is a developer feature used to manage resources independent of an account managed by a user, specifically publishing modules and providing on-chain-only access control, e.g., signers. - -Typically, a resource account is used for two main purposes: - -* Store and isolate resources; a module creates a resource account just to host specific resources. -* Publish module as a standalone (resource) account, a building block in a decentralized design where no private keys can control the resource account. The ownership (SignerCap) can be kept in another module, such as governance. - -## Restrictions - -In Aptos, a resource account is created based upon the SHA3-256 hash of the source's address and additional seed data. A resource account can be created only once; for a given source address and seed, there can be only one resource account. That is because the calculation of the resource account address is fully determined by the former. - -An entity may call `create_account` in an attempt to claim an account ahead of the creation of a resource account. But if a resource account is found, Aptos will transition ownership of the account over to the resource account. This is done by validating that the account has yet to execute any transactions and that the `Account::signer_capbility_offer::for` is none. The probability of a collision where someone has legitimately produced a private key that maps to a resource account address is improbably low. 
- -## Setup - -The easiest way to set up a resource account is by: - -1. Using Aptos CLI: `aptos account create-resource-account` creates a resource account, and `aptos move create-resource-account-and-publish-package` creates a resource account and publishes the specified package under the resource account's address. -1. Writing custom smart contracts code: in the `resource_account.move` module, developers can find the resource account creation functions `create_resource_account`, `create_resource_account_and_fund`, and `create_resource_account_and_publish_package`. Developers can then call those functions to create resource accounts in their smart contracts. - -Each of those options offers slightly different functionality: -* `create_resource_account` - merely creates the resource account but doesn't fund it, retaining access to the resource account's signer until explicitly calling `retrieve_resource_account_cap`. -* `create_resource_account_and_fund` - creates the resource account and funds it, retaining access to the resource account's signer until explicitly calling `retrieve_resource_account_cap`. -* `create_resource_account_and_publish_package` - creates the resource account and results in loss of access to the resource account by design, because resource accounts are used to make contracts autonomous and immutable. - -In this example, you will [initialize](https://github.com/aptos-labs/aptos-core/blob/2e9d8ee759fcd3f6e831034f05c1656b1c48efc4/aptos-move/move-examples/mint_nft/sources/minting.move#L73) the `mint_nft` module and retrieve the signer capability from both the resource account and module account. To do so, call `create_resource_account_and_publish_package` to publish the module under the resource account's address. - -1. Initialize the module as shown in the [`minting.move`](https://github.com/aptos-labs/aptos-core/blob/2e9d8ee759fcd3f6e831034f05c1656b1c48efc4/aptos-move/move-examples/mint_nft/sources/minting.move#L73) example. -1. 
Call `create_resource_account_and_publish_package` to publish the module under the resource account's address, such as in the [`mint_nft.rs`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/e2e-move-tests/src/tests/mint_nft.rs#L62) end-to-end example. -1. Retrieve the signer cap from the resource account + module account as shown in the [`minting.move`](https://github.com/aptos-labs/aptos-core/blob/2e9d8ee759fcd3f6e831034f05c1656b1c48efc4/aptos-move/move-examples/mint_nft/sources/minting.move#L83) example. - -Note, if the above `resource_account` signer is **not** already set up as a resource account, retrieving the signer cap will fail. The `source_addr` field in the `retrieve_resource_account_cap` function refers to the address of the source account, or the account that creates the resource account. - -For an example, see the `SignerCapability` employed by the `mint_nft` function in [`minting.move`](https://github.com/aptos-labs/aptos-core/blob/2e9d8ee759fcd3f6e831034f05c1656b1c48efc4/aptos-move/move-examples/mint_nft/sources/minting.move#L143-L181). - -For more details, see the "resource account" references in [`resource_account.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/resource_account.move) and [`account.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/account.move). diff --git a/developer-docs-site/docs/move/prover/index.md b/developer-docs-site/docs/move/prover/index.md deleted file mode 100644 index 6e5711938717c..0000000000000 --- a/developer-docs-site/docs/move/prover/index.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: "Move Prover" -slug: "move-prover" ---- - -# Move Prover - -The Move Prover supports formal [specification](./spec-lang.md) and [verification](./prover-guide.md) of Move code. 
The Move Prover can automatically validate logical properties of Move smart contracts while offering a user experience similar to a type checker or linter. - -The Move Prover exists to make contracts more *trustworthy*; it: - -- Protects massive assets managed by the Aptos blockchain from smart contract bugs -- Protects against well-resourced adversaries -- Anticipates justified regulator scrutiny and compliance requirements -- Allows domain experts with a mathematical background, but not necessarily a software engineering background, to understand what smart contracts do - -For more information, refer to the documentation: - -- [Installation](../../tools/aptos-cli/install-cli/install-move-prover.md) -- [Move Prover User Guide](prover-guide.md) -- [Move Specification Language](spec-lang.md) -- [Move Prover Supporting Resources](supporting-resources.md) diff --git a/developer-docs-site/docs/move/prover/prover-guide.md b/developer-docs-site/docs/move/prover/prover-guide.md deleted file mode 100644 index 250a3c2daf44a..0000000000000 --- a/developer-docs-site/docs/move/prover/prover-guide.md +++ /dev/null @@ -1,207 +0,0 @@ ---- -title: "Move Prover User Guide" -slug: "prover-guide" ---- - -# Move Prover User Guide - -This is the user guide for the Move Prover. This document accompanies the -[Move specification language](spec-lang.md). See the sections below for details. - -## Running the Move Prover - -The Move Prover is invoked via the [Aptos CLI](../../tools/aptos-cli/use-cli/use-aptos-cli.md#move-examples). In order to call the CLI, you must have a [*Move package*](../book/packages.md) in place. In the simplest case, a Move package is defined by a directory with a set of `.move` files in it and a manifest of the name `Move.toml`. 
You can create a new Move package at a given location by running the command: `aptos move init --name ` - -Once the package exists, call the Move Prover from the directory to be tested or by supplying its path to the `--package-dir` argument: - - * Prove the sources of the package in the current directory: - ```shell - aptos move prove - ``` - - * Prove the sources of the package at <path>: - ```shell - aptos move prove --package-dir - ``` - -See example output and other available options in the [Proving Move](../../tools/aptos-cli/use-cli/use-aptos-cli.md#proving-move) section of Use Aptos CLI. - -### Target filtering - -By default, the `aptos move prove` command verifies all files of a package. During iterative development of larger packages, it is often more effective to focus verification on particular files with the -`-f` (`--filter`) option, like so: - -```shell script -aptos move prove -f coin -``` - -In general, if the string provided to the `-f` option is contained somewhere in the file name of a source, that source will be included for verification. - -> NOTE: the Move Prover ensures there is no semantic difference between verifying modules one-by-one -> or all at once. However, if your goal is to verify all modules, verifying them in a single -> `aptos move prove` run will be significantly faster than sequentially. - -### Prover options - -The Move Prover has a number of options (such as the filter option above) that you pass with an invocation of: `aptos move prove `. The most commonly used option is the `-t` (`--trace`) option that causes the Move Prover to produce richer diagnosis when it encounters errors: - -```shell script -aptos move prove -f coin -t -``` - -To see the list of all command line options, run: `aptos move prove --help` - -### Prover configuration file - -You can also create a Move Prover configuration file named `Prover.toml` that lives side-by-side with the `Move.toml` manifest file in the root of the package directory. 
For example, to enable tracing by default for a package, add a `Prover.toml` file with the following configuration: - -```toml -[prover] -auto_trace_level = "VerifiedFunction" -``` - -Find the most commonly used options in the example `.toml` below, which you can cut and paste and adopt for your needs (adjusting the defaults shown in the displayed values as needed): - -```toml -# Verbosity level -# Possible values: "ERROR", "WARN", "INFO", "DEBUG". Each level subsumes the output of the previous one. -verbosity_level = "INFO" - -[prover] -# Set auto-tracing level, which enhances the diagnosis the Move Prover produces on verification errors. -# Possible values: "Off", "VerifiedFunction", "AllFunctions" -auto_trace_level = "Off" - -# Minimal severity level for diagnosis to be reported. -# Possible values: "Error", "Warning", "Note" -report_severity = "Warning" - -[backend] -# Timeout in seconds for the solver backend. Note that this is a soft timeout and may not always -# be respected. -vc_timeout = 40 - -# Random seed for the solver backend. Different seeds can result in different verification run times, -# as the solver uses heuristics. -random_seed = 1 - -# The number of processor cores to assume for concurrent check of verification conditions. -proc_cores = 4 -``` - -> HINT: For local verification, you may want to set `proc_cores` to an aggressive number -> (your actual cores) to speed up the turnaround cycle. - - -## Prover diagnosis - -When the Move Prover finds a verification error, it prints diagnosis to standard output in a style similar to a compiler or a debugger. 
We explain the different types of diagnoses below, based on the following evolving example: - -```move -module 0x0::m { - struct Counter has key { - value: u8, - } - - public fun increment(a: address) acquires Counter { - let r = borrow_global_mut(a); - r.value = r.value + 1; - } - - spec increment { - aborts_if !exists(a); - ensures global(a).value == old(global(a)).value + 1; - } -} -``` - -We will modify this example as we demonstrate different types of diagnoses. - -### Unexpected abort - -If we run the Move Prover on the example immediately above, we get the following error: - -``` -error: abort not covered by any of the `aborts_if` clauses - ┌─ m.move:11:5 - │ - 8 │ r.value = r.value + 1; - │ - abort happened here with execution failure - · -11 │ ╭ spec increment { -12 │ │ aborts_if !exists(a); -13 │ │ ensures global(a).value == old(global(a)).value + 1; -14 │ │ } - │ ╰─────^ - │ - = at m.move:6: increment - = a = 0x29 - = at m.move:7: increment - = r = &mmm.Counter{value = 255u8} - = at m.move:8: increment - = ABORTED - -{ - "Error": "Move Prover failed: exiting with verification errors" -} -``` - -The Move Prover has generated an example counter that leads to an overflow when adding 1 to the value of 255 for an `u8`. This overflow occurs if the function specification calls for abort behavior, but the condition under which the function is aborting is not covered by the specification. And in fact, with `aborts_if !exists(a)`, we only cover the abort caused by the absence of the resource, but not the abort caused by the arithmetic overflow. - -Let's fix the above and add the following condition: - -```move -spec increment { - aborts_if global(a).value == 255; - ... -} -``` - -With this, the Move Prover will succeed without any errors. 
- -### Postcondition failure - -Let us inject an error into the `ensures` condition of the above example: - -```move -spec increment { - ensures global(a).value == /*old*/(global(a).value) + 1; -} -``` - -With this, the Move Prover will produce the following diagnosis: - -``` -error: post-condition does not hold - ┌─ m.move:14:9 - │ -14 │ ensures global(a).value == /*old*/(global(a).value) + 1; - │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - │ - = at m.move:6: increment - = a = 0x29 - = at m.move:7: increment - = r = &mmm.Counter{value = 0u8} - = at m.move:8: increment - = at m.move:9: increment - = at m.move:12: increment (spec) - = at m.move:15: increment (spec) - = at m.move:13: increment (spec) - = at m.move:14: increment (spec) - -{ - "Error": "Move Prover failed: exiting with verification errors" -} -``` - -While we know what the error is (as we just injected it), this is not particularly obvious in the output. This is because we don't directly see on which values the `ensures` condition was actually evaluated. To see -this, use the `-t` (`--trace`) option; this is not enabled by default because it makes the verification problem slightly harder for the solver. - -Instead of, or in addition to, the `--trace` option, you can use the built-in function `TRACE(exp)` in conditions to explicitly mark expressions whose values should be printed on verification failures. - -> NOTE: Expressions that depend on quantified symbols cannot be traced. Also, expressions appearing in -> specification functions cannot currently be traced. - -## Debugging the Move Prover - -The Move Prover is an evolving tool with bugs and deficiencies. Sometimes it might be necessary to debug a problem based on the output the Move Prover passes to the underlying backends. If you pass the option `--dump`, the Move Prover will output the original Move bytecode, as well as the Move Prover bytecode, as the former is transformed during compilation.
diff --git a/developer-docs-site/docs/move/prover/spec-lang.md b/developer-docs-site/docs/move/prover/spec-lang.md deleted file mode 100644 index 0ec8c8c553e9d..0000000000000 --- a/developer-docs-site/docs/move/prover/spec-lang.md +++ /dev/null @@ -1,1248 +0,0 @@ ---- -title: "Move Specification Language" -slug: "spec-lang" ---- - -# Move Specification Language - -This document describes the *Move specification language (MSL)*, a subset of the [Move](../move-on-aptos.md) language that supports specification of the behavior of Move programs. MSL works together with the [Move Prover](./index.md), a tool that can statically verify the correctness of MSL specifications against Move programs. In contrast to traditional testing, verification of MSL is exhaustive and holds for all possible inputs and global states of a [Move module](../../reference/glossary.md#move-module) or [transaction script](../../reference/glossary.md#transaction-or-move-script). At the same time, this verification of MSL is fast and automated enough that it can be used at a similar place in the developer workflow where tests are typically conducted (for example, for qualification of pull requests in continuous integration). - -While the Move programming language at this point is stable, the subset represented by MSL should be -considered evolving. This has no impact on platform stability, since MSL is not running in -production; yet MSL is used for offline quality assurance where it is continuously improved for -evolving objectives. - -This document describes the language only; see [Use the Move Prover](./prover-guide.md) for instructions. The reader is expected to have basic knowledge of the Move language, as well as basic -principles of pre- and post-condition specifications. (See for example the [Design by contract](https://en.wikipedia.org/wiki/Design_by_contract)). 
For examples of specifications, we refer to -the [Aptos framework](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/overview.md) documentation, which has specifications embedded. - -# Expressions - -Expressions in MSL are a subset of Move program expressions plus a set of additional constructs, as -discussed in the following sections. - -## Type System - -The type system of MSL is similar to that of Move, with the following differences: - -- There are two types of encodings for integer types: `num` and `bv` (bit vector). - If an integer (either a constant or a variable) is not involved in any bitwise operations directly or indirectly, - regardless of its type in Move (`u8`, `u16`, `u32`, `u64`, `u128` and `u256`), it is treated as the same type. In - specifications, this type is called `num`, which is an arbitrary precision *signed* integer type. - When MSL refers to a Move name that represents a `u8` or such, it will be automatically widened - to `num`. This allows writing MSL expressions like `x + 1 <= MAX_U128` or `x - y >= 0` without - needing to worry about overflow or underflow. - Different from `num`, `bv` cannot and does not need to be explicitly used in specifications: if an integer is involved in bitwise operations such as `&`, `|` or `^`, it will be automatically encoded as `bv` at the backend. - Moreover, a `bv` integer has a fixed precision, which is consistent with its precision in Move (`bv8`, `bv16`, `bv32`, `bv64`, `bv128` and `bv256`). - Note that, in general using `bv` is not as efficient as `num` in the [SMT](https://en.wikipedia.org/wiki/Satisfiability_modulo_theories) solver such as [Z3](https://github.com/Z3Prover/z3). Consequently, - the Move Prover has some restrictions when using bitwise operations, which are stated in detail below. -- The Move types `&T`, `&mut T`, and `T` are considered equivalent for MSL. Equality is interpreted - as value equality.
There is no need to worry about dereferencing a reference from the Move - program: these are automatically dereferenced as needed. This simplification is possible because - MSL cannot modify values from a Move program, and the program cannot directly reason about - reference equality (which eliminates the need for doing so in MSL). (Note there is also a - restriction in expressiveness coming with this, namely - for [functions which return `&mut T`](#expressiveness). However, this is rarely hit in practice, - and there are workarounds.) -- There is the additional type `type`, which is the type of all types. It can be used only in - quantifiers. -- There is the additional type `range`, which represents an integer range (and the notation `n..m` to - denote a value). - -## Naming - -Name resolution in MSL works similar to the Move language. `use` declarations can introduce aliases for -imported names. MSL functions and variable names must start with a lowercase letter. Schema names -are treated like types and must start with a capital letter. ([Schemas](#schemas) are a named construct -discussed later). - -Move functions, MSL functions, Move types, and schemas all share the same namespace and are -therefore unambiguous if aliased via a Move `use` clause. Because of the common name space, an MSL -function cannot have the same name as a Move function. This is often handled via the convention to -prefix MSL functions as in `spec_has_access` when the related Move function is called `has_access`. - -## Operators - -All Move operators are supported in MSL, except `&`, `&mut`, and `*` (dereference). - -In addition to the existing operators, vector subscript `v[i]`, slicing `v[i..j]`, and range -construction `i..j` are supported (the type of integer ranges is a new builtin type called `range`). Moreover, boolean implication `p ==> q` is supported as a more intuitive form than `!p || q`. - -## Function calls - -In MSL expressions, functions can be called like in Move. 
However, the callee must either be -an [MSL helper function](#helper-functions) or a **pure** Move function. - -Move functions are considered pure if they do not modify global state and do not use Move expression -features that are not supported in MSL expressions (as defined in this document). - -There is one extension. If a Move function definition contains a direct `assert`, this will be -ignored when it is called from an MSL expression, and the function will be considered pure. For -example: - -```move -fun get(addr: address): &T { assert(exists(addr), ERROR_CODE); borrow_global(addr) } -``` - -This function is pure and can be called from an MSL expression. The assertion will be ignored, and -the function will be interpreted as: - -```move -spec fun get(addr: address): T { global(addr) } -``` - -This is justified by that MSL having [*partial semantics*](#partial-semantics). - -## Statements - -Limited sequencing of the form `{ let x = foo(); x + x }` is supported, as well as if-then-else. -Other statement forms of the Move language are not supported. - -## Pack and unpack - -Pack expressions are supported. Unpack expressions are currently *not* supported. - -## Quantifiers - -Universal and existential quantification is supported. The general form is: - -``` -forall , ..., [ where ] : -exists , ..., [ where ] : -``` - -- Bindings can either be of the form `name: ` or `name in `. For the second form, the - expression must either be a `range` or a vector. -- The optional constraint `where ` allows to restrict the quantified range. `forall x: T where p: q` - is equivalent to `forall x: T : p ==> q` and `exists x: T where p: q` is equivalent to `exists x: T : p && q`. - -Notice that it is possible to quantify over types. 
For example: - -``` -forall t: type, addr: address where exists>(addr): exists>(addr) -``` - -## Choice operator - -The choice operator allows selecting a value that satisfies a predicate: - -``` -choose a: address where exists(a) && global(a).value > 0 -``` - -If the predicate is not satisfiable, the result of the choice will be undetermined. (See [partial semantics](#partial-semantics)). - -The choice also comes in a form to select the *minimal* value from a set of integers, as in: - -``` -choose min i: num where in_range(v, i) && v[i] == 2 -``` - -## Cast operator - -In the specification language, we can use the same syntax `(e as T)` to cast an expression `e` with one integer type to `T`, an integer type of another size. - -## Shift operator - -Shift operators `<<` and `>>` are supported in the specification language, and both of them have the same semantics as in the Move language. As for abort, if a value `v` has width `n`, then `v << m` or `v >> m` will abort if `m >= n`. - -## Bitwise operators - -Move programs using bitwise operators `&`, `|` and `^` can be verified in the prover, and these operators are also supported in the specification language. -Due to encoding and efficiency issues, using bitwise operators has more caveats: - -- Integers involved in bitwise operations are encoded as `bv` types at the backend, and two encodings of integers are not compatible. For instance, if a variable `v` is involved in a bitwise operation such as `v & 2` or `v = a ^ b`, then when it is used in an arithmetic operation `v * w` or a shift operation `v << w`, `w` will be implicitly cast to a `bv` type in the Move program. -However, the specification language does not support implicit type cast so users must explicitly use the built-in function `int2bv` in the specification: `v << int2bv(w)`. -Note that since each `bv` type has a fixed length (from 8 to 256), values with type `num` cannot be converted into `bv`.
- -- Verification of `bv` types is not efficient and may lead to timeout. As a result, users may prefer isolating bitwise operations from other operations and not using `int2bv` if possible. Moreover, users need to use pragmas to explicitly specify which integer-typed function arguments or struct fields will be used in bitwise computations: - -```move - struct C has drop { - a: u64, - b: u64 - } - spec C { - // b, the second field of C, will be of bv type - pragma bv=b"1"; - } - public fun foo_generic(i: T): T { - i - } - - spec foo_generic { - // The first parameter will be of bv type if T is instantiated as a number type - pragma bv=b"0"; - // The first return value will be of bv type if T is instantiated as a number type - pragma bv_ret=b"0"; - } - - public fun test(i: C): u64 { - let x1 = foo_generic(C.b); - x1 ^ x1 - } - - spec test { - // Explicit type cast is mandatory for generating correct boogie program - ensures result == (0 as u64); - } -``` - -Note that if arguments or fields of a generic function or struct are specified with `bv` types, -they will be of `bv` types in all instances of the function or the struct when the instantiated type is an integer type. - - -- Values with integer types in vectors and tables can be encoded as `bv` types; indices and keys in tables cannot be `bv` types for now. Using other types will lead to internal errors. - -## Built-in functions - -MSL supports a number of built-in constants and functions. Most of them are not available in the Move -language: - -- `MAX_U8: num`, `MAX_U64: num`, `MAX_U128: num` returns the maximum value of the corresponding - type. -- `exists(address): bool` returns true if the resource T exists at address. -- `global(address): T` returns the resource value at address. -- `len(vector): num` returns the length of the vector. -- `update(vector, num, T>): vector` returns a new vector with the element replaced at the - given index. -- `vec(): vector` returns an empty vector. 
-- `vec(x): vector` returns a singleton vector. -- `concat(vector, vector): vector` returns the concatenation of the parameters. -- `contains(vector, T): bool` returns true if element is in vector. -- `index_of(vector, T): num` returns the index of the element in the vector, or the length of - the vector if it does not contain it. -- `range(vector): range` returns the index range of the vector. -- `in_range(vector, num): bool` returns true if the number is in the index range of the - vector. -- `in_range(range, num): bool` returns true if the number is in the range. -- `update_field(S, F, T): S` updates a field in a struct, preserving the values of other fields, - where `S` is some struct, `F` the name of a field in `S`, and `T` a value for this field. -- `old(T): T` delivers the value of the passed argument at point of entry into a Move function. This - is allowed in - `ensures` post-conditions, - inline spec blocks (with additional restrictions), and - certain forms of invariants, as discussed later. -- `TRACE(T): T` is semantically the identity function and causes visualization of the argument's - value in error messages created by the prover. -- `int2bv(v)` explicitly converts an integer `v` into its `bv` representation. -- `bv2int(b)` explicitly converts a 'bv' integer 'b' into the `num` representation. However it is not encouraged to use it due to efficiency issue. - -Built-in functions live in an unnamed outer scope of a module. If the module defines a function `len`, -then this definition will shadow that of the according built-in function. To access the built-in -function in such a situation, one can use the notation `::len(v)`. - -## Partial semantics - -In MSL, expressions have partial semantics. This is in contrast to Move program expressions, which -have total semantics, since they either deliver a value or abort. 
- -An expression `e[X]` that depends on some variables `X` may have a known interpretation for -some assignments to variables in `X` but is unknown for others. An unknown interpretation for a -sub-expression causes no issue if its value is not needed for the overall expression result. -Therefore it does not matter if we say `y != 0 && x / y > 0` -or `x / y > 0 && y != 0`: boolean operators are commutative. - -This basic principle inherits to higher-level language constructs. For example, in specifications, -it does not matter in which order conditions are supplied: `aborts_if y != 0; ensures result == x / y;` is the same as -`ensures result == x / y; aborts_if y != 0;`. Also, `aborts_if P; aborts_if Q;` is the same -as `aborts_if Q || P` -. - -Moreover, the principle of partial semantics is inherited to [specification helper functions](#helper-functions), which behave transparently. Specifically, inlining those functions is equivalent to calling them (call-by-expression parameter passing semantics). - -# Specifications - -Specifications are contained in so-called *specification blocks* (abbreviated **spec block**) that -can appear as module members and inside Move functions. The various types of spec blocks are shown -below, and will be discussed in subsequent sections. - -```move -module addr::M { - struct Counter has key { - value: u8, - } - - public fun increment(a: address) acquires Counter { - let r = borrow_global_mut(a); - spec { - // spec block targeting this code position - ... - }; - r.value = r.value + 1; - } - - spec increment { - // spec block targeting function increment - ... - } - - spec Counter { - // spec block targeting struct Counter - ... - } - - spec schema Schema { - // spec block declaring a schema - ... - } - - spec fun f(x: num): num { - // spec block declaring a helper function - ... - } - - spec module { - // spec block targeting the whole module - ... 
- } -} -``` - -Apart from spec blocks inside Move functions, the textual position of a spec block is irrelevant. Also, -a spec block for a struct, function, or module can be repeated multiple times, accumulating the -content. - -## Separating specifications - -Instead of putting specifications into the same module as the regular Move definitions, one can also -put them into a separate "specification" module, which can live in the same or a different file: - -```move -module addr::M { - ... -} -spec addr::M { - spec increment { .. } -} -``` - -The syntax of a specification module is the same as for a regular module; however, Move functions -and structures are not allowed. - -A specification module must be compiled together with the Move module it is targeting and cannot be -compiled and verified standalone. - -In case Move definitions are far apart (e.g. in different files), it is possible to augment the -specification of a Move function with a signature of this function to give sufficient context to -understand the specification. This syntax is optionally enabled in regular and in specification -modules: - -```move -public fun increment(a: address) acquires Counter { .. } -... -spec increment(a: address) { .. } -``` - -## Pragmas and properties - -Pragmas and properties are a generic mechanism to influence interpretation of specifications. They -are also an extension point to experiment with new concepts before they become part of the mainstream -syntax. Here we give a brief introduction into their general syntax; individual instances are -discussed later. - -The general form of a pragma is: - -```move -spec .. { - pragma = ; -} -``` - -The general form of a property is: - -```move -spec .. { - [ = ] ; // ensures, aborts_if, include, etc.. -} -``` - -The `` can be any value supported by MSL (or the Move language). A value assignment can -also be omitted, in which case a default is used.
For example, it is common to use `pragma option;` -as a shortcut for `pragma option = true;`. - -Instead of a single pragma or property, a list can also be provided, as in `invariant [global, isolated] P`. - -### Pragma inheritance - -A pragma in a module spec block sets a value that applies to all other spec blocks in the module. A -pragma in a function or struct spec block can override this value for the function or struct. -Furthermore, the default value of some pragmas can be defined via the prover configuration. - -As an example, we look at the `verify` pragma. This pragma is used to turn verification on or off. - -```move -spec module { - pragma verify = false; // By default, do not verify specs in this module ... -} - -spec increment { - pragma verify = true; // ... but do verify this function. - ... -} -``` - -### General pragmas and properties - -A number of pragmas control general behavior of verification. Those are listed in the table below. - -| Name | Description | -|----------------------------------|-------------- -| `verify` | Turns on or off verification. -| `intrinsic` | Marks a function to skip the Move implementation and use a prover native implementation. This makes a function behave like a native function even if it is not so in Move. -| `timeout` | Sets a timeout (in seconds) for function or module. Overrides the timeout provided by command line flags. -| `verify_duration_estimate` | Sets an estimate (in seconds) for how long the verification of function takes. If the configured `timeout` is less than this value, verification will be skipped. -| `seed` | Sets a random seed for function or module. Overrides the seed provided by command line flags. - -The following properties control general behavior of verification: - -| Name | Description | -|------------|-------------- -| `[deactivated]` | Excludes the associated condition from verification.
- -## Pre and post state - -Multiple conditions in spec blocks work with a *pre* and *post* state, relating them to each other. -Function specifications are one example of this: in the `ensures P` condition, the pre-state (at -function entry) and the post-state (at function exit) are related via the predicate `P`. However, -the concept is more general and also applied for invariants, where the pre-state is before and -post-state after a global update. - -In contexts where a pre/post-state is active, expressions are evaluated implicitly in the -post-state. To evaluate an expression in a pre-state, one uses the built-in function `old(exp)`, -which evaluates its parameter in the pre-state and returns its value. It is important to understand -that every sub-expression in `exp` is computed in the pre-state as well, including calls to helper -functions. - -The 'state' in question here consists of assignments to global resource memory, as well as to any -parameters of the function of type `&mut T`. Examples: - -```move -fun increment(counter: &mut u64) { *counter = *counter + 1 } -spec increment { - ensures counter == old(counter) + 1; -} - -fun increment_R(addr: address) { - let r = borrow_global_mut(addr); - r.value = r.value + 1; -} -spec increment_R { - ensures global(addr).value == old(global(addr).value) + 1; -} -``` - -## Helper functions - -MSL allows defining helper functions. Those functions can then be used in expressions. - -Helper functions are defined using the following syntax: - -```move -spec fun exists_balance(a: address): bool { exists>(a) } -``` - -As seen in the example, helper functions can be generic. Moreover, they can access global state. - -Definitions of helper functions are neutral regarding whether they apply to -a [pre- or post-state](#pre-and-post-state). They are evaluated in the currently active state. For -instance, in order to see whether a balance existed in the pre-state, one -uses `old(exists_balance(a))`. 
Consequently, the expression `old(..)` is not allowed -within the definition of a helper function. - -Helper functions are partial functions; see the discussion of [partial semantics](#partial-semantics). - -### Uninterpreted functions - -A helper function can be defined as **uninterpreted** by simply omitting its body: - -```move -spec fun something(x: num): num; -``` - -An uninterpreted function is one the prover is allowed to assign some arbitrary meaning to, as long -as it is consistent within a given verification context. Uninterpreted functions are a useful tool -for abstraction in specifications (see also [abstract specifications](#abstract-specifications)). - -### Axioms - -The meaning of helper functions can be further constrained by using **axioms**. Currently, axioms -must be contained in module spec blocks: - -```move -spec module { - axiom forall x: num: something(x) == x + 1; -} -``` - -Axioms should be used with care as they can introduce unsoundness in the specification logic via -contradicting assumptions. The Move Prover supports a smoke test for detecting unsoundness via -the `--check-inconsistency` flag. - -## Let bindings - -A spec block can contain let bindings that introduce names for expressions: - -```move -fun get_R(account: signer): R { ... } -spec get_R { - let addr = signer::spec_address_of(account); - aborts_if addr != ROOT; - ensures result == global(addr); -} -``` - -In a spec block that has a pre and post-state (like a function specification), the `let name = e` -form will evaluate `e` in the pre-state. In order to evaluate an expression in the post-state, use `let post name = e`. In the rhs expression of this form, one can use `old(..)` to refer to the -pre-state. - -## Aborts_if condition - -The `aborts_if` condition is a spec block member that can appear only in a function context. It -specifies conditions under which the function aborts. 
- -In the following example, we specify that the function `increment` aborts if the `Counter` resource -does not exist at address `a` (recall that `a` is the name of the parameter of `increment`). - -```move -spec increment { - aborts_if !exists(a); -} -``` - -If a function has more than one `aborts_if` condition, those conditions are or-ed with each other. -The evaluation of the combined aborts condition (or-ed from each individual condition) depends on -the value of the pragma `aborts_if_is_partial`. If this value is false (the default), the function -aborts *if and only if* the combined aborts condition is true. In this case, the above aborts -specification for `increment` will lead to a verification error, since there are additional -situations where `increment` can abort, namely if incrementing `Counter.value` would lead to an -overflow. To fix this, the specification can be completed like this: - -```move -spec increment { - pragma aborts_if_is_partial = false; // This is the default, but added here for illustration. - aborts_if !exists(a); - aborts_if global(a).value == 255; -} -``` - -If the value of `aborts_if_is_partial` is true, the combined aborts condition (the or-ed individual -conditions) only *implies* that the function aborts. Formally, if `A` is the combined aborts condition, then -with `aborts_if_is_partial = true`, we have `A ==> function_aborts`; otherwise we have -`A <==> function_aborts`. Therefore, the following does verify: - -```move -spec increment { - pragma aborts_if_is_partial = true; - aborts_if !exists(a); -} -``` - - -> Note that there is a certain risk in setting `aborts_if_is_partial` to true, and best practice is to avoid it in specifications of public functions and transaction scripts once those are considered finalized. 
This is because changing the code after finalization of the spec can add new (non-trivial, undesired) abort situations the original specification did not anticipate yet will nevertheless silently pass verification. - -If no aborts condition is specified for a function, abort behavior is unspecified. The function may -or may not abort, and verification will not raise any errors, whether `aborts_if_is_partial` is set -or not. In order to state that a function never aborts, use `aborts_if false`. One can use the -pragma `aborts_if_is_strict` to change this behavior; this is equivalent to an `aborts_if false` being added to each function that does not have an explicit `aborts_if` clause. - -### Aborts_if condition with code - -The `aborts_if` condition can be augmented with code: - -``` -fun get_value(addr: address): u64 { - aborts(exists(addr), 3); - borrow_global(addr).value -} -spec get_value { - aborts_if !exists(addr) with 3; -} -``` - -It is a verification error if the above function does not abort with code `3` under the given -condition. - -In order to specify a direct VM abort, one can use the special constant `EXECUTION_FAILURE`: - -``` -fun get(addr: address): &Counter acquires Counter { - borrow_global(addr) -} -spec get { - aborts_if !exists(addr) with EXECUTION_FAILURE; -} -``` - -This same constant can be used for all other VM failures (division by zero, overflow, etc.). - -## Aborts_with condition - -The `aborts_with` condition allows specifying with which codes a function can abort, independent -under which condition. It is similar to a 'throws' clause in languages like Java. - -```move -fun get_one_off(addr: address): u64 { - aborts(exists(addr), 3); - borrow_global(addr).value - 1 -} -spec get_one_off { - aborts_with 3, EXECUTION_FAILURE; -} -``` - -If the function aborts with any other or none of the specified codes, a verification error will be -produced. - -The `aborts_with` condition can be combined with `aborts_if` conditions. 
In this case, the `aborts_with` -specifies any other codes with which the function may abort, in addition to the ones given in the `aborts_if`: - -```move -spec get_one_off { - aborts_if !exists(addr) with 3; - aborts_with EXECUTION_FAILURE; -} -``` - -If this is not wanted, and the `aborts_with` should be independent of `aborts_if`, one can use the -property `[check]`: - -```move -spec get_one_off { - aborts_if !exists(addr) with 3; - aborts_if global(addr) == 0 with EXECUTION_FAILURE; - - aborts_with [check] 3, EXECUTION_FAILURE; -} -``` - -## Requires condition - -The `requires` condition is a spec block member that postulates a pre-condition for a function. The -Move Prover will produce verification errors for functions that are called with violating -pre-conditions. - -A `requires` is different from an `aborts_if`: in the latter case, the function can be called, and -any aborts it produces will be propagated to the caller context. In the `requires` case, the Move Prover -will not allow the function to be called in the first place. Nevertheless, the function can *still -be called at runtime* if verification is skipped. Because of this, `requires` are rare in Move -specifications, and `aborts_if` are more common. Specifically, `requires` should be avoided for public APIs. - -An example of `requires` is: - -```move -spec increment { - requires global(a).value < 255; -} -``` - -## Ensures condition - -The `ensures` condition postulates a post-condition for a function that must be satisfied when the -function terminates successfully (i.e. does not abort). The Move Prover will verify each `ensures` to -this end. - -An example for the `ensures` condition is the following: - -```move -spec increment { - ensures global(a) == old(global(a)) + 1; -} -``` - -Within the expression for the `ensures` condition, one can use the `old` function, as discussed in -[Pre and post state](#pre-and-post-state). 
- -## Modifies condition - -The `modifies` condition is used to provide permissions to a function to modify global storage. The -annotation itself comprises a list of global access expressions. It is specifically used together -with [opaque function specifications](#opaque-specifications). - -```move -struct S has key { - x: u64 -} - -fun mutate_at(addr: address) acquires S { - let s = borrow_global_mut(addr); - s.x = 2; -} -spec mutate_at { - pragma opaque; - modifies global(addr); -} -``` - -In general, a global access expression has the form `global(address_expr)`. The -address-valued expression is evaluated in the pre-state of the annotated function. - -```move -fun read_at(addr: address): u64 acquires S { - let s = borrow_global(addr); - s.x -} - -fun mutate_S_test(addr1: address, addr2: address): bool acquires T { - assert(addr1 != addr2, 43); - let x = read_at(addr2); - mutate_at(addr1); // Note we are mutating a different address than the one read before and after - x == read_at(addr2) -} -spec mutate_S_test { - aborts_if addr1 == addr2; - ensures result == true; -} -``` - -In the function `mutate_S_test`, the assertion in the spec block is expected to hold. A benefit of -the modifies specification on `mutate_at` is that this assertion can be proved whether or -not `mutate_at` is inlined. - -If the modifies annotation is omitted on a function, then that function is deemed to have all -possible permissions for those resources it may modify during its execution. The set of all -resources that may be modified by a function is obtained via an interprocedural analysis of the -code. In the example above, `mutate_S_test` does not have a modifies specification and modifies -resource `S` via the call to `mutate_at`. Therefore, it is considered to have modified `S` at any -possible address. 
Instead, if the programmer adds `modifies global(addr1)` -to the specification of `mutate_S_test`, then the call to `mutate_at` is checked to make sure that -modify permissions granted to `mutate_S_test` cover the permissions it grants to `mutate_at`. - -## Invariant condition - -The invariant condition can be applied on structs and on global level. - -### Function invariants - -The `invariant` condition on a function is simply a shortcut for a `requires` and `ensures` with the -same predicate. - -Thus the following spec block: - -```move -spec increment { - invariant global(a).value < 128; -} -``` - -... is equivalent to: - -```move -spec increment { - requires global(a).value < 128; - ensures global(a).value < 128; -} -``` - -### Struct invariants - -When the `invariant` condition is applied to a struct, it expresses a well-formedness property of -the struct data. Any instance of this struct that is currently not mutated will satisfy this -property (with exceptions as outlined below). - -For example, we can postulate an invariant on our counter that it never must exceed the value of -127: - -```move -spec Counter { - invariant value < 128; -} -``` - -A struct invariant is checked by the Move Prover whenever the struct value is constructed (packed). While -the struct is mutated (e.g. via a `&mut Counter`) the invariant does *not* hold (but see exception -below). In general, we consider mutation as an implicit unpack, and end of mutation as a pack. - -The Move language semantics unambiguously identifies the point when mutation ends and starts. This -follows from the borrow semantics of Move and includes mutation via an enclosing struct. -(The mutation of an inner struct ends when the mutation of the root struct where mutation started -ends.) - -There is one exception to this rule. 
When a mutable reference to a struct declared in module M is -passed into a *public* function of M which does by itself *not* return any other mutable reference (which could be borrowed from the input parameter), we treat this parameter as "packed". That means, on function entry, we will unpack it and on function exit we will pack again, enforcing the invariant. This reflects that in Move, struct data can be mutated only within the module that declares the struct; so for an outside caller of the public function, the mutable reference can actually not be mutated unless by calling public functions of module M again. It is a significant simplification of the verification problem to exploit this in the semantics. - -### Global invariants - -A global invariant appears as a member of module. It can express a condition over the global state -of the Move program, as represented by resources stored in memory. For example, the below invariant -states that a `Counter` resource stored at any given address can never be zero: - -```move -module addr::M { - invariant forall a: addr where exists(a): global(a).value > 0; -} -``` - -A global invariant is assumed to hold when data is read from the global state, and is asserted (and -may fail to verify) at the moment the state is updated. For example, the below function will never abort with arithmetic underflow because the counter value is always greater than zero; however, it will create a verification error since the counter can drop to zero: - -``` -fun decrement_ad(addr: address) acquires Counter { - let counter = borrow_global_mut(addr); - let new_value = counter.value - 1; // Will not abort because counter.value > 0 - *counter.value = new_value; // Fails verification since value can drop to zero -} -``` - -#### Disabling invariants - -There are times when a global invariant holds almost everywhere, except for a brief interval inside a function. In current Move code, this often occurs when something (e.g. 
an account) is being set up and -several structs are published together. Almost everywhere, an invariant holds that all the structs are published or none of them are. But the code that publishes the structs must do so sequentially. While the structs are being published, there will be a point where some are published and others are not. - -In order to verify invariants that hold except during small regions, there is a feature to allow users to disable invariants temporarily. Consider the following code fragment: - -```move -fn setup() { - publish1(); - publish2(); - } -} -``` -where `publish1` and `publish2` publish two different structs, `T1` and `T2` at address `a`. - -```move -module addr::M { - invariant [global] exists(a) == exists(a) -} -``` - -As written, the Move Prover will report that the invariant is violated after the call to `publish1` and before the call to `publish2`. If either of `publish1` or `publish2` is without the other, the Move Prover -will also report a violation of the invariant. - -By default, a global invariant is checked immediately after the instruction `I` that touches the resources mentioned in the global invariant. The `[suspendable]` attribute (at the invariant side) together with two pragmas (specified in function spec block) provide fine-grained control on where we hope this invariant to be checked: - -- `disable_invariants_in_body`: the invariant will be checked at the end of the function where `I` resides. -- `delegate_invariants_to_caller`: the invariant will be checked by all callers of the function where `I` resides. - -For the example above, we can add the pragma `disable_invariants_in_body`: -```move -spec setup { - pragma disable_invariants_in_body; -} -``` -which says that invariants are not required to hold while `setup` is executing but are assumed to hold on entry to and exit from `setup`. - -This pragma changes the Move Prover's behavior. 
The invariants are assumed on entry to `setup` but not proved during or after `publish1` and `publish2`. Instead, all invariants that could be invalidated in the -body of `setup` are asserted and proved at the point of return from `setup`. A consequence of this processing is that the user may need to provide stronger post-conditions on `publish1` and `publish2` to -make it possible to prove the invariants on exit from `setup`. - -Another consequence of this processing is that invariants cannot safely be assumed to hold during the execution of `publish1` and `publish2` (unless nothing in the body of `setup` changes state -mentioned in the invariant). Therefore, if proving a post-condition requires the invariant to be assumed, the post-condition will fail. - -In the example, invariants hold at the call sites of `setup` but not in the body. For `publish1`, invariants don't necessarily hold at the call site *or* in the body of the function. In the example, that -behavior is implied because `publish1` is called in a context where invariants are disabled. - -When invariants are disabled in `setup` in the above example, the Move Prover cannot assume them on entry to `publish1` and `publish2` and should not try to prove them on exit from those functions. The Move Prover -would have the same behavior for any functions called by `publish1` or `publish2`. The Move Prover *automatically* adopts this behavior when invariants are disabled in a calling function, but it is possible for the user to declare that a function be treated like `publish1`. - -For example, if `publish2` is *only* called from the setup function above, and we did *not* disable invariants in `setup`, we could achieve a similar effect by using the pragma `delegate_invariants_to_caller`, instead. - -```move -spec setup { - pragma delegate_invariants_to_caller; -} -``` -This would be legal only if `setup` is a private or `public (friend)` function. 
The difference between this and disabling invariants in `setup` is that the invariants would not be assumed at the beginning of `setup` and would be proved after `setup` returns at each site where it is called. - -While both pragmas disable invariants in the body of a function, the difference is that `disable_invariants_in_body` assumes invariants on entry and proves them on exit, while `delegate_invariants_to_caller` does neither. - -There are some limitations on how these pragmas can be used. `disable_invariants_in_body` cannot be declared for functions where invariants are delegated to a caller, either explicitly via the pragma -or implicitly because the function is called in a context where invariants have been disabled. (This restriction is to ensure consistent processing, because on pragma assumes that invariants hold -in the calling context and the other does not). Second, it is illegal for a public or script function to delegate invariant checking to its callers (since the Move Prover does not know all the call sites), *unless* the function cannot possibly invalidate an invariant because it doesn't change any of the state mentioned in `exists` and `global` expressions appearing in the invariant. - -#### Update invariants - -The `update` form of a global invariant allows to express a relation between [pre-state and post-state](#pre-and-post-state) of a global state update. For example, the following invariant states that the counter must decrease monotonically whenever it is updated: - -```move -module addr::M { - invariant update [global] forall a: addr where old(exists(a)) && exists(addr): - global(a).value <= old(global(a)); -} -``` - -#### Isolated global invariants - -A global invariant can be marked as `[isolated]` to indicate that it is not relevant for proving -other properties of the program. An isolated global invariant will not be assumed when the related -global state is read. 
It will only be assumed before the state is updated to help prove that the -invariant still holds after the update. This feature is for improving performance in situations -where there are many global invariants, but they have no direct influence on verification. - -#### Modular verification and global invariants - -Certain usage of global invariants leads to verification problems that cannot be checked in a modular fashion. "Modular" here means that a module can be verified standalone and proven to be universally correct in all usage contexts (if preconditions are met). - -A non-modular verification problem may arise if a global invariant refers to state from multiple modules. Consider a situation where module `M1` uses module `M2`, and `M1` contains the following invariant, with the helper function `condition` referring to global state of each respective module: - -```move -module addr::M1 { - invariant M1::condition() ==> M2::condition(); -} -``` - -When we verify `M1` standalone, the Move Prover will determine that it also needs to verify functions in `M2`, namely those which update the M2 memory such that the invariant in M1 can fail. - -## Assume and assert conditions in code - -A spec block might also occur anywhere an ordinary Move statement block can occur. -Here is an example: - -``` -fun simple1(x: u64, y: u64) { - let z; - y = x; - z = x + y; - spec { - assert x == y; - assert z == 2*x; - } -} -``` - -In such inline spec blocks, only a subset of conditions are permitted: -- `assume` and `assert` statements are allowed in any code locations. -- loop `invariant` statements are allowed only in code locations that represent loop headers. - -An assert statement inside a spec block indicates a condition that must hold when control reaches -that block. If the condition does not hold, an error is reported by the Move Prover. An `assume` -statement, on the other hand, blocks executions violating the condition in the statement. 
The -function `simple2` shown below is verified by the Move Prover. However, if the first spec block -containing the assume statement is removed, Move Prover will show a violation to the `assert` -statement in the second spec block. - -``` -fun simple2(x: u64, y: u64) { - let z: u64; - spec { - assume x > y; - }; - z = x + y; - spec { - assert z > 2*y; - } -} -``` - -### Loop invariants - -An `invariant` statement encodes a loop invariant and must be placed at a loop head, as in the -following example: - -``` -fun simple3(n: u64) { - let x = 0 - loop { - spec { - invariant x <= n; - }; - if (x < n) { - x = x + 1 - } else { - break - } - }; - spec { - assert x == n; - } -} -``` - -A loop invariant is translated into two `assert` statements and one `assume` statement to facilitate the inductive reasoning of properties about the loop. In break down, a loop invariant is translated to: -- An `assert` statement that confirms the invariant holds when the loop is first encountered in the - execution -- establishing the base case. -- An `assume` statement that encodes the property that the invariant holds at loop iteration `I`. -- An `assert` statement that checks whether the invariant continues to hold at loop iteration `I+1`. - -### Referring to pre-state - -Occasionally, we would like to refer to the pre-state of a mutable function argument in inline spec -blocks. In MSL, this can be done with the `old(T)` expression. Similar to the semantics of `old(..)` -in post conditions, an `old(T)` expression in an `assume` or `assert` statement always yields the -value of `T` at the function entry point. Here is an example that illustrate the use of -`old(..)` in an inline spec block: - -``` -fun swap(x: &mut u64, y: &mut u64) { - let t = *x; - *x = *y; - *y = t; - spec { - assert x == old(y); - assert y == old(x); - }; -} -``` - -The above example is trivial as the same property can be expressed with post conditions -(i.e., `ensures`) too. 
But there are cases where we must use `old(..)` to refer to the pre-state, especially in the specification of loop invariants. Consider the following example -where we verify that the `vector_reverse` function properly reverses the order of all elements -in a vector: - -``` -fun verify_reverse(v: &mut vector) { - let vlen = vector::length(v); - if (vlen == 0) return (); - - let front_index = 0; - let back_index = vlen -1; - while ({ - spec { - assert front_index + back_index == vlen - 1; - assert forall i in 0..front_index: v[i] == old(v)[vlen-1-i]; - assert forall i in 0..front_index: v[vlen-1-i] == old(v)[i]; - assert forall j in front_index..back_index+1: v[j] == old(v)[j]; - assert len(v) == vlen; - }; - (front_index < back_index) - }) { - vector::swap(v, front_index, back_index); - front_index = front_index + 1; - back_index = back_index - 1; - }; -} -spec verify_reverse { - aborts_if false; - ensures forall i in 0..len(v): v[i] == old(v)[len(v)-1-i]; -} -``` - -Note the usage of `old(v)` in the loop invariants. Without them, it is hard to express the -invariant that the vector is partially reversed while the loop is iterating and the rest -remain unchanged. - -However, unlike the `old(T)` expressions in `ensures` conditions where `T` can be any valid -expression (e.g., `old(v[i])` is allowed), the `old(T)` expressions in `assert` and `assumes` -statements accept only a single variable as `T` and that variable must be a function argument of -a mutable reference type. In the above example, `old(v[i])` is not allowed, and we should use -`old(v)[i]` instead. - -## Specification variables - -MSL supports *spec variables*, also called *ghost variables* in the verification community. These -variables are used only in specifications and represent information derived from the global state of -resources. An example use case is to compute the sum of all coins available in the system and -specify that the sum can be changed only in certain scenarios. 
- -We illustrate this feature by introducing a spec variable that maintains the sum of all `Counter` -resources from our running example. First, a spec variable is introduced via spec module block as -follows: - -```move -spec module { - global sum_of_counters: num; -} -``` - -This value is going to be updated whenever a `Counter` is packed or unpacked. (Recall that mutation -is interpreted as an implicit unpack and pack): - -```move -spec Counter { - invariant pack sum_of_counters = sum_of_counters + value; - invariant unpack sum_of_counters = sum_of_counters - value; -} -``` -> TODO: `invariant pack` and `invariant unpack` are currently not implemented - -Now we may for example want to specify that the sum of all Counter instances in the global state -should never exceed a particular value. We can do this as follows: - -```move -spec module { - invariant [global] sum_of_counters < 4711; -} -``` - -Note that spec variables can also be referenced from helper functions. Moreover, spec variables can -be generic: - -```move -spec module { - global some_generic_var: num; -} -``` - -When using such a spec variable, a type parameter must be provided, as in `some_generic_var`. Effectively, a generic spec variable is like a family of variables indexed by types. - -## Schemas - -Schemas are a means for structuring specifications by grouping properties together. Semantically, -they are just syntactic sugar that expand to conditions on functions, structs, or modules. - -### Basic Schema Usage - -Schemas are used as such: - -```move -spec schema IncrementAborts { - a: address; - aborts_if !exists(a); - aborts_if global(a).value == 255; -} - -spec increment { - include IncrementAborts; -} -``` - -Each schema may declare a number of typed variable names and a list of conditions over those -variables. All supported condition types can be used in schemas. 
The schema can then be included in -another spec block: - -- If that spec block is for a function or a struct, all variable names the schema declares must be - matched against existing names of compatible type in the context. -- If a schema is included in another schema, existing names are matched and must have the same type, - but non-existing names will be added as new declarations to the inclusion context. - -When a schema is included in another spec block, it will be checked whether the conditions it -contains are allowed in this block. For example, including the schema `IncrementAborts` into a -struct spec block will lead to a compile-time error. - -When a schema is included, the names it declares can also bound by expressions. For example, one can -write `include IncrementAborts{a: some_helper_address()}`. Effectively, not providing a binding is -equivalent to writing `IncrementAborts{a: a}` if `a` is an existing name in scope. - -Schemas can be generic. Generic schemas must be fully instantiated where they are included; type -inference is not available for schemas. - -### Schema expressions - -When a schema is included, one can use a limited set of Boolean operators as follows: - -- `P ==> SchemaExp`: all conditions in the schema will be prefixed with `P ==> ..`. Conditions that - are not based on Boolean expressions will be rejected. -- `if (P) SchemaExp1 else SchemaExp2`: this is treated similar to including both - `P ==> SchemaExp1` and `!P ==> SchemaExp2`. -- `SchemaExp1 && SchemaExp2`: this is treated as two includes for both schema expressions. - -### Schema apply operation - -One of the main use cases for schemas is to be able to name a group of properties and then apply -those to a set of functions. This is achieved by the `apply` operator. The `apply` spec block member -can appear only in module spec blocks. - -The general form of the apply operator is `apply Schema to FunctionPattern, .. except FunctionPattern, ..`. 
Here, `Schema` can be a schema name or a schema name plus formal type arguments. `FunctionPatterns` consists of an optional visibility modifier `public` or `internal` (if not provided, both visibilities will match), a name pattern in the style of a shell file pattern ( e.g. `*`, `foo*`, `foo*bar`, etc.), and finally an optional type argument list. All type arguments provided to `Schema` must be bound -in this list and vice versa. - -The `apply` operator includes the given schema in all function spec blocks that match the patterns, -except those excluded via the `except` patterns. - -A typical use of the `apply` operator is to provide common pre and post conditions to all functions in -a module with some exceptions. Example: - -```move -spec schema Unchanged { - let resource = global(ADDR): - ensures resource == old(resource); -} - -spec module { - // Enforce Unchanged for all functions except the initialize function. - apply Unchanged to * except initialize; -} -``` - -Notice that while with [global invariants](#global-invariants) we can express similar things, we *cannot* -express the restriction of the invariant to only specific functions. - -## Opaque specifications - -With the pragma `opaque`, a function is declared to be solely defined by its specification at caller -sides. In contrast, if this pragma is not provided, then the function's implementation will be used -as the basis to verify the caller. - -Using `opaque` requires the specification to be sufficiently complete for the verification problem at hand. Without `opaque`, the Move Prover will use the implementation as the source of truth for the definition of the function. But with `opaque`, if there is an aspect of the function definition unspecified, an arbitrary meaning will be assumed. 
For example, with the specification below, the `increment` function can abort under arbitrary conditions: - -```move -spec increment { - pragma opaque; - // aborts_if !exists<Counter>(a); // We need to add this to make the function not abort arbitrarily - ensures global<Counter>(a) == old(global<Counter>(a)) + 1; -} -``` - -In general, `opaque` functions enable modular verification, as they abstract from the implementation -of functions, resulting in much faster verification. - -If an `opaque` function modifies state, it is advised to use the [`modifies` condition](#modifies-condition) in its specification. If this is omitted, verification of the state changes will fail. - -## Abstract specifications - -The `[abstract]` property allows specifying a function such that abstract semantics are used at the -caller side that is different from the actual implementation. This is useful if the implementation -is too complex for verification, and abstract semantics are sufficient for verification goals. -The `[concrete]` property, in turn, still allows specifying conditions that are verified against -the implementation but not used at the caller side. - -Consider the following example of a hash function. The actual value of the hash is not relevant for -verification of callers, and we use an [uninterpreted helper function](#uninterpreted-functions) delivering an arbitrary value chosen by the Move Prover. We can still specify the concrete implementation and verify its correctness: - -```move -fun hash(v: vector<u8>): u64 { - <<hash implementation>>(v) -} -spec hash { - pragma opaque; - aborts_if false; - ensures [concrete] result == <<hash implementation>>(v); - ensures [abstract] result == spec_hash_abstract(v); -} -spec fun spec_hash_abstract(v: vector<u8>): u64; // uninterpreted function -``` - -The soundness of the abstraction is the responsibility of the specifier and not verified by the -Move Prover. 
- -> NOTE: The abstract/concrete properties should only be used with opaque specifications, but the Move Prover will currently not generate an error message even though they are not used with opaque specifications. - -> NOTE: The `modifies` clause does not currently support abstract/concrete. Also, if no modifies is given, the modified state will be computed from the implementation anyway, possibly conflicting with `[abstract]` properties. - -## Documentation generation - -The organization of specification blocks in a file is relevant for documentation generation -- even -though it is not for the semantics. - -# Expressiveness - -The Move specification language is expressive enough to represent the full Move language semantics (formal argument outstanding) with one exception: functions that return a `&mut T` type. - -Consider the following code: - -```move -struct S { x: u64, y: u64 } - -fun x_or_y(b: bool, s: &mut S): &mut u64 { - if (b) &mut s.x else &mut s.y -} -spec x_or_y { - ensures b ==> result == s.x; - ensures !b ==> result == s.y; -} -``` - -We are not able to specify the *full* semantics of `x_or_y` in MSL because we cannot capture the -semantics of mutable references. While we can say something about the value behind the reference at -function exit, subsequent effects as in `*x_or_y(b, &mut s) = 2` cannot be specified. - -However, the Move Prover *does* understand the meaning of such functions -- the restriction is only -in what we can specify. Practically, this means we cannot make the function `x_or_y` opaque and must -let verification rely on that the Move Prover directly works with the implementation. 
Specifically, we -can verify the following (which can then be opaque): - -```move -fun x_or_y_test(s: S): S { - *x_or_y(true, &mut s) = 2; - s -} -spec x_or_y_test { - pragma opaque; - ensures result.x == 2; - ensures result.y == s.y; -} -``` - -## Supporting resources - -* [Design by contract PRE_POST_REFERENCE](https://en.wikipedia.org/wiki/Design_by_contract) -* [APTOS_FRAMEWORK](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/overview.md) diff --git a/developer-docs-site/docs/move/prover/supporting-resources.md b/developer-docs-site/docs/move/prover/supporting-resources.md deleted file mode 100644 index a4d7b7d7024d2..0000000000000 --- a/developer-docs-site/docs/move/prover/supporting-resources.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -title: "Supporting Resources" -slug: "supporting-resources" ---- - -# Move Prover Supporting Resources - -## Standard Library and Framework Specifications -- [Move Stdlib](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/move-stdlib) -- [Aptos Stdlib](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/aptos-stdlib) -- [Aptos Framework](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/framework/aptos-framework) -- [Diem Framework](https://github.com/move-language/move/tree/main/language/documentation/examples/diem-framework/move-packages/DPN) - -## Examples - -- [`hello_prover` example](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/hello_prover) -- [`basic-coin` example](https://github.com/move-language/move/tree/main/language/documentation/examples/experimental/basic-coin) -- [`math-puzzle` example](https://github.com/move-language/move/tree/main/language/documentation/examples/experimental/math-puzzle) -- [`rounding-error` example](https://github.com/move-language/move/tree/main/language/documentation/examples/experimental/rounding-error) -- [`verify-sort` 
example](https://github.com/move-language/move/tree/main/language/documentation/examples/experimental/verify-sort) -- [Move Prover Examples by Zellic](https://github.com/zellic/move-prover-examples) - -## Tutorials - -- [The Move Tutorial, steps 7 and 8](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/move-tutorial#step-7--use-the-move-prover) -- [Verify Smart Contracts in Aptos with the Move Prover by MoveBit](https://www.movebit.xyz/blog/post/move-prover-tutorial-part-1.html) -- [The Move Prover: A Practical Guide by OtterSec](https://osec.io/blog/tutorials/2022-09-16-move-prover/) -- [Formal Verification, the Move Language, and the Move Prover by Certik](https://www.certik.com/resources/blog/2wSOZ3mC55AB6CYol6Q2rP-formal-verification-the-move-language-and-the-move-prover) -- [The Move Prover: Quality Assurance of Formal Verification by Certik](https://www.certik.com/resources/blog/1NygvVeqIwhbUk1U1q3vJF-the-move-prover-quality-assurance-of-formal-verification) - -## Presentations - -- [Verifying Smart Contracts with Move Prover by Wolfgang Grieskamp (video)](https://drive.google.com/file/d/1DpI-rQ25Kq1jqMGioLgVrG3YuCqJHVMm/view?usp=share_link) -- [Formal verification of Move programs for the Libra blockchain by David Dill (video)](http://www.fields.utoronto.ca/talks/Formal-verification-Move-programs-Libra-blockchain) -- [Move Prover - Best Practices & Tricks - A User’s Perspective by Xu-Dong@MoveBit (slides)](https://docs.google.com/presentation/d/1SuV0m5gGxSN9SaLdj9lLmTjspJ2xN1TOWgnwvdWbKEY/edit?usp=sharing) - -## Conference papers - -- Zhong, Jingyi Emma, Kevin Cheang, Shaz Qadeer, Wolfgang Grieskamp, Sam Blackshear, Junkil Park, Yoni Zohar, Clark Barrett, and David L. Dill. "The move prover." In *International Conference on Computer Aided Verification*, pp. 137-150. 
Springer, Cham, 2020.Harvard - - https://research.facebook.com/publications/the-move-prover/ -- Dill, David, Wolfgang Grieskamp, Junkil Park, Shaz Qadeer, Meng Xu, and Emma Zhong. "Fast and reliable formal verification of smart contracts with the Move prover." In *International Conference on Tools and Algorithms for the Construction and Analysis of Systems*, pp. 183-200. Springer, Cham, 2022.Harvard - - https://research.facebook.com/publications/fast-and-reliable-formal-verification-of-smart-contracts-with-the-move-prover/ diff --git a/developer-docs-site/docs/nodes/aptos-api-spec.md b/developer-docs-site/docs/nodes/aptos-api-spec.md deleted file mode 100644 index 1b646b0b590f8..0000000000000 --- a/developer-docs-site/docs/nodes/aptos-api-spec.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: "Aptos API Spec" -slug: "aptos-api-spec" -hide_table_of_contents: true -hide_title: true ---- - -import ApiExplorer from "@site/src/components/ApiExplorer"; -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - - - - - - - - - - - diff --git a/developer-docs-site/docs/nodes/full-node/_category_.json b/developer-docs-site/docs/nodes/full-node/_category_.json deleted file mode 100644 index 3e96cedcf78fb..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Public Fullnode", - "position": 10 -} diff --git a/developer-docs-site/docs/nodes/full-node/aptos-db-restore.md b/developer-docs-site/docs/nodes/full-node/aptos-db-restore.md deleted file mode 100644 index 75d0ae9a47390..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/aptos-db-restore.md +++ /dev/null @@ -1,118 +0,0 @@ ---- -title: "Bootstrap Fullnode from Backup" ---- - -# Bootstrap Fullnode from Backup - -Since the Aptos mainnet launch in October 2022, the Aptos community has grown rapidly. As of May 2023, Aptos has 743GB and 159GB of data in testnet and mainnet, respectively. 
We expect the data to increase greatly as more transactions are submitted to the blockchain. Facing such a large amount of data, we want to provide users with a way to achieve two goals: - -- Quickly bootstrap a database to start a new or failed node -- Efficiently recover data from any specific period - -Our database restore tool lets you use the existing [public backup files](#public-backup-files) to restore the database (i.e., the transaction history containing events, write sets, key-value pairs, etc.) on your local machine to any historical range or to the latest version. The public backup files are backed by cryptographic proof and stored on both AWS and Google Cloud for an easy download. - -## Public backup files - -Aptos Labs maintains a few publicly accessible database backups by continuously querying a local fullnode and storing the backup data in remote storage, such as Amazon S3 or Google Cloud Storage. - -| | AWS Backup Data | Google Cloud Backup Data | -| --- | --- | --- | -| Testnet | https://github.com/aptos-labs/aptos-networks/blob/main/testnet/backups/s3-public.yaml | https://github.com/aptos-labs/aptos-networks/blob/main/testnet/backups/gcs.yaml | -| Mainnet | https://github.com/aptos-labs/aptos-networks/blob/main/mainnet/backups/s3-public.yaml | https://github.com/aptos-labs/aptos-networks/blob/main/mainnet/backups/gcs.yaml | - -The backup files consist of three types of data that can be used to reconstruct the blockchain DB: - -- `epoch_ending` – It contains the ledger_info at the ending block of each epoch since the genesis. This data can be used to prove the epoch's provenance from the genesis and validator set of each epoch. -- `state_snapshot` – It contains a snapshot of the blockchain's state Merkle tree (SMT) and key values at a certain version. -- `transaction` – It contains the raw transaction metadata, payload, the executed outputs of the transaction after VM, and the cryptographic proof of the transaction in the ledger history. 
- -Each type of data in the backup storage is organized as follows: -- The metadata file in the metadata folder contains the range of each backup and the relative path to the backup folder. -- The backup contains a manifest file and all the actual chunked data files. - -![aptos-db-restore.png](../../../static/img/docs/aptos-db-restore.png) - -## Restore a DB using the public backup files - -The [Aptos CLI](../../tools/aptos-cli/use-cli/use-aptos-cli.md) supports two kinds of restore operations by reading from the public backup files: -1. Recreating a database with minimal transaction history at a user-specified transaction version (or the latest version the backup has) -2. Restoring the database over a specific period. In addition to the above, this option ensures that the recreated database carries the ledger history of the user-designated version range. - -Aptos CLI 1.0.14 or newer is needed to perform these operations. Additionally, depending on whether you use AWS or Google Cloud, install [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) or [gsutil](https://cloud.google.com/storage/docs/gsutil_install). - -### Bootstrap a DB - -The `aptos node bootstrap-db` command can quickly restore a database from the closest snapshot to a target version, but it does not restore the transaction history prior to the target version. - -Use the following options: -- `target-version` – The sync will begin from this period onwards in the transaction history. -- `command-adapter-config` – The path to one of the [YAML configuration files](#public-backup-files) that specifies the location of the public backup files and commands used by our backup and restore tool to interact with the remote storage. -- `target-db-dir` – The target DB path. 
- -Example command: - -```bash -aptos node bootstrap-db \ - --target-version 500000000 \ - --command-adapter-config /path/to/s3-public.yaml \ - --target-db-dir /path/to/local/db -``` - -### Restore a DB over a specific time period - -The `aptos node bootstrap-db` command can restore the transaction history within a specified period, along with the state Merkle tree at the target version. - -Use the following options: -- `ledger-history-start-version` – The version to which the DB will sync. -- `target-version` – The sync will begin from this period onwards in the transaction history. -- `command-adapter-config` – The path to one of the [YAML configuration files](#public-backup-files) that specifies the location of the public backup files and commands used by our backup and restore tool to interact with the remote storage. -- `target-db-dir` – The target DB path. - -Example command: - -```bash -aptos node bootstrap-db \ - --ledger-history-start-version 150000000 \ - --target-version 155000000 \ - --command-adapter-config /path/to/s3-public.yaml \ - --target-db-dir /path/to/local/db -``` - -### Restore a fullnode with full history from genesis -**Resource Requirements** - -* Open File Limit: Set the open file limit to 10K. -* Testnet: - * Disk: 1.5TB - * RAM: 32GB - * Duration: Approximately 10 hours to finish. -* Mainnet: - * Disk: 1TB - * RAM: 32GB - * Duration: Approximately 5 hours to finish. - - -To restore a fullnode with full history from genesis, set `ledger-history-start-version` to 0 and disable the pruner by [disabling the ledger pruner](../../guides/data-pruning.md). - -Example command: - -```bash -aptos node bootstrap-db \ ---ledger-history-start-version 0 \ ---target-version use_the_largest_version_in_backup \ ---command-adapter-config /path/to/s3-public.yaml \ ---target-db-dir /path/to/local/db -``` - -:::tip -If you don't specify the target_version (via `--target-version`), the tool will use the latest version in the backup as the target version. 
-::: - -Disable the pruner in the node config to prevent the early history from being pruned when you start the node. -```Yaml -storage: - storage_pruner_config: - ledger_pruner_config: - enable: false - -``` diff --git a/developer-docs-site/docs/nodes/full-node/bootstrap-fullnode.md b/developer-docs-site/docs/nodes/full-node/bootstrap-fullnode.md deleted file mode 100644 index 25a4ab9b02254..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/bootstrap-fullnode.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: "Bootstrap Fullnode from Snapshot" -slug: "bootstrap-fullnode" -sidebar_position: 14 ---- - -# Bootstrap a New Fullnode from Snapshot - -This document describes how to bootstrap a new Aptos fullnode quickly using a snapshot. Although you may bootstrap a new fullnode using [state-sync](../../guides/state-sync.md), this might not be an optimal approach after the network has been running for a while; it can either take too much time, or it won't be able to fetch all required data since most nodes have already pruned ledger history. The easiest way to bootstrap a new fullnode is using an existing _fullnode snapshot_. A fullnode snapshot is simply a copy of the storage data of an existing fullnode that can be used to help start other fullnodes more quickly. - -:::caution Proceed with caution -It is not recommended to use fullnode snapshots for running fullnodes in production on **mainnet**. This is because snapshots are not fully verified by the fullnode software. As a result, the snapshot may be invalid or contain incorrect data. To prevent security issues, we recommend bootstrapping from snapshot only for test environments, e.g., **devnet** and **testnet**. If you wish to bootstrap from snapshot for **mainnet**, do not use that node in a production environment. Finally, you should always verify that any snapshot you download comes from a reputable source to avoid downloading malicious files. 
-::: - -## Find an existing fullnode snapshot - -There are a number of fullnode snapshots that can be downloaded from different Aptos community members. These include: -- BWareLabs (Testnet and Mainnet): [BWareLabs Aptos Node Snapshots](https://bwarelabs.com/snapshots) -- Polkachu (Mainnet): [Polkachu Aptos Node Snapshots](https://polkachu.com/aptos_snapshots/aptos) - -:::tip Questions about snapshot data -Depending on how the snapshot is constructed and compressed, the snapshot files may be different sizes. If you have any questions about the snapshot data, or run into any issues, please reach out to the Aptos community members directly via the [#node-support](https://discord.com/channels/945856774056083548/953421979136962560) channel in [Aptos Discord](https://discord.gg/aptosnetwork). -::: - -## Use an existing fullnode snapshot - -To use a snapshot, simply download and copy the files to the location of your storage database for the fullnode. This location can be found and updated in the fullnode `yaml` configuration file under `data_dir`. See [Start a public fullnode](fullnode-source-code-or-docker.md) for more information. \ No newline at end of file diff --git a/developer-docs-site/docs/nodes/full-node/fullnode-network-connections.md b/developer-docs-site/docs/nodes/full-node/fullnode-network-connections.md deleted file mode 100644 index d541adb44c8fa..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/fullnode-network-connections.md +++ /dev/null @@ -1,193 +0,0 @@ ---- -title: "Fullnode Network Connections" -slug: "fullnode-network-connections" ---- - -# Fullnode Network Connections - -When running a fullnode on an Aptos network, you can configure your node's -network connections for a few different purposes. For example, you can add -a seed peer to your node's configuration YAML to connect your node to a -specific peer of your choosing. 
Or you can create a static network identity -for your node to allow other nodes to connect to you, as described in [Network Identity For Fullnode](./network-identity-fullnode.md). - -This document describes how to configure the network of your fullnode for -different networks and requirements, including: - -- Allowing fullnodes to connect to your node. -- Connecting your fullnode to an Aptos blockchain deployment. -- Connecting your fullnode to seed peers. -- Configuring priority access for other fullnodes. -- Configuring your fullnode as a private fullnode. - -## Allowing fullnodes to connect to your node - -:::tip Before you proceed - -Before allowing other fullnodes to connect to your fullnode, -be sure to create a fullnode identity. See [Network Identity For Fullnode](./network-identity-fullnode.md). - -::: - -Once you start your fullnode with a static identity you can allow others to connect to your fullnode: - -:::tip - -In the below steps, the port numbers used are for illustration only. You can -use your choice of port numbers. See [Ports and port settings](../validator-node/operator/node-requirements.md#networking-requirements) for an explanation of port settings and how they are used. - -::: - -- Make sure you open port `6180` (or `6182`, for example, depending on which port your node is listening to) and that you open your firewall. -- If you are using Docker, simply add `- "6180:6180"` or `- "6182:6182"` under ports in your ``docker-compose.yaml`` file. -- Share your fullnode static network identity with others. They can then use it in the `seeds` key of their `fullnode.yaml` file to connect to your fullnode. See the section below. -- Make sure the port number you put in the `addresses` matches the one you have in the fullnode configuration file `fullnode.yaml` (for example, `6180` or `6182`). - -Share your fullnode static network identity in the following format in our Discord to advertise your node. 
-Note, the Discord channel to share your identity may differ depending on the blockchain deployment you're running in. -See [Aptos Blockchain Networks](../networks.md) for more information. - - ```yaml - : - addresses: - # with DNS - - "/dns4//tcp//noise-ik//handshake/0" - role: Upstream - : - addresses: - # with IP - - "/ip4//tcp//noise-ik//handshake/0" - role: Upstream - ``` - -For example: - - ```yaml - B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813: - addresses: - - "/dns4/pfn0.node.devnet.aptoslabs.com/tcp/6182/noise-ik/B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813/handshake/0" - role: "Upstream" - B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813: - addresses: - - "/ip4/100.20.221.187/tcp/6182/noise-ik/B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813/handshake/0" - role: "Upstream" - ``` - -:::tip - -Peer ID is synonymous with `AccountAddress`. - -::: - -## Connecting your fullnode to an Aptos blockchain deployment - -When running a fullnode on an Aptos blockchain deployment, your node will be -able to discover other nodes in the network automatically, e.g., using the -genesis blob or the network addresses of the validators and validator fullnodes -registered on the blockchain. Be sure to download the correct genesis blob and -waypoint for your fullnode to ensure your node connects to the correct Aptos -blockchain deployment. See [Aptos Blockchain Networks](../networks.md) -for more information. - -## Connecting your fullnode to seed peers - -All Aptos fullnodes are configured to accept a maximum number of network -connections. As a result, if the network is experiencing high network -connection volume, your fullnode might not able to connect to the default -nodes in the network and you may see several errors in your node's logs, e.g., -`No connected AptosNet peers!` or `Unable to fetch peers to poll!`. 
- -If this happens continuously, you should manually add seed peers to your node's -configuration file to connect to other nodes. - -:::tip - -You may see `No connected AptosNet peers!` or `Unable to fetch peers to poll!` in your node's error messages. This is normal when the node is first starting. -Wait for the node to run for a few minutes to see if it connects to peers. If not, follow the below steps: - -::: - -See below for a few seed peer addresses you can use in your -`public_full_node.yaml` file. The peers you choose will differ based on the -blockchain deployment your node is running in. - -:::tip - -You can also use the fullnode addresses provided by the Aptos community. Anyone already running a fullnode can provide their address for you to connect. See the Aptos Discord. - -::: - - -### Devnet seed peers - -To add seeds to your devnet fullnode, add these to your `public_full_node.yaml` configuration file under your `discovery_method`, as shown in the below example: - -```yaml -... -full_node_networks: - - discovery_method: "onchain" - listen_address: ... - seeds: # All seeds are declared below - bb14af025d226288a3488b4433cf5cb54d6a710365a2d95ac6ffbd9b9198a86a: - addresses: - - "/dns4/pfn0.node.devnet.aptoslabs.com/tcp/6182/noise-ik/bb14af025d226288a3488b4433cf5cb54d6a710365a2d95ac6ffbd9b9198a86a/handshake/0" - role: "Upstream" - 7fe8523388084607cdf78ff40e3e717652173b436ae1809df4a5fcfc67f8fc61: - addresses: - - "/dns4/pfn1.node.devnet.aptoslabs.com/tcp/6182/noise-ik/7fe8523388084607cdf78ff40e3e717652173b436ae1809df4a5fcfc67f8fc61/handshake/0" - role: "Upstream" - f6b135a59591677afc98168791551a0a476222516fdc55869d2b649c614d965b: - addresses: - - "/dns4/pfn2.node.devnet.aptoslabs.com/tcp/6182/noise-ik/f6b135a59591677afc98168791551a0a476222516fdc55869d2b649c614d965b/handshake/0" - role: "Upstream" -... 
-``` - -## Configuring priority access for other fullnodes - -To configure your fullnode to allow another fullnode to connect to it even -when your fullnode has hit the maximum number of available network connections, -follow this method: - -In the configuration file for your fullnode add the other fullnode as a seed -peer with the `Downstream` role. This will allow the other fullnode to connect -directly to you with priority access. In your fullnode configuration file, add: -```yaml -seeds: - - addresses: - -
- role: Downstream # Allows the node to connect to us -``` - -Similarly, to make the other fullnode connect to yours, add the following to the -other fullnode's configuration file: -```yaml -seeds: - - addresses: - -
- role: PreferredUpstream # Allows the node to connect to the seed peer -``` - -## Configuring your fullnode as a private fullnode - -You can also configure your fullnode as a private fullnode should you wish. -What this means is that your fullnode will not allow unauthenticated -connections, specifically, any node that is not a validator, validator -fullnode, or seed peer will be unable to connect to your fullnode. - -To configure your fullnode as a private fullnode, add the following to your -fullnode configuration file. Note, you should add this to the first network -entry in the `full_node_networks` configuration: - -```yaml -... -full_node_networks: - - discovery_method: "onchain" - listen_address: ... - max_inbound_connections: 0 # Prevents any unauthenticated inbound connections - mutual_authentication: true # Requires authenticated connections - ... -... -``` diff --git a/developer-docs-site/docs/nodes/full-node/fullnode-source-code-or-docker.md b/developer-docs-site/docs/nodes/full-node/fullnode-source-code-or-docker.md deleted file mode 100755 index ce3ca73ec377a..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/fullnode-source-code-or-docker.md +++ /dev/null @@ -1,298 +0,0 @@ ---- -title: "Run a Fullnode with Source or Docker" -slug: "fullnode-source-code-or-docker" -sidebar_position: 10 ---- - -# Run a Public Fullnode with the Aptos Source Code or Docker - -You can run your own [public fullnode](../../concepts/fullnodes.md) to synchronize with the state of the Aptos blockchain and stay up-to-date. Public fullnodes replicate the entire state of the blockchain by querying other Aptos fullnodes (public fullnodes or validator fullnodes) or validators. - -Alternatively, you can use the public fullnodes provided by Aptos Labs. However, such Aptos Labs-provided public fullnodes have rate limits, which can impede your development. By running your own public fullnode you can directly synchronize with the Aptos blockchain and avoid such rate limits. 
- -Public fullnodes can be run by anyone. This tutorial explains how to configure a public fullnode to connect to an Aptos network. - -:::caution Choose a network -This document describes how to start a public fullnode in the Aptos `mainnet` network yet can easily be used to do the same in the `devnet` and `testnet` networks. To do so, instead check out the desired branch and use the `genesis.blob` and `waypoint.txt` node files for the respective branch: [`mainnet`](../node-files-all-networks/node-files.md), [`devnet`](../node-files-all-networks/node-files-devnet.md), and [`testnet`](../node-files-all-networks/node-files-testnet.md). -::: - -:::tip Starting a node in testnet? -If this is the first time you're starting a fullnode in `testnet`, it is recommended to bootstrap your node first by restoring from a [backup](../full-node/aptos-db-restore.md) or downloading [a snapshot](../full-node/bootstrap-fullnode.md). This will avoid any potential issues with network connectivity and peer discovery. -::: - -## Hardware requirements - -We recommend the following hardware resources: - -- For running a production grade public fullnode: - - - **CPU**: 8 cores, 16 threads (Intel Xeon Skylake or newer). - - **Memory**: 32GB RAM. - -- For running the public fullnode for development or testing: - - - **CPU**: 2 cores. - - **Memory**: 4GB RAM. - -## Storage requirements - -The amount of data stored by Aptos depends on the ledger history (length) of the blockchain and the number of on-chain states (e.g., accounts). These values depend on several factors, including: the age of the blockchain, the average transaction rate and the configuration of the ledger pruner. Follow the storage requirements described in [Validator Hardware Requirements](../validator-node/operator/node-requirements.md#hardware-requirements). - -:::tip Devnet blockchain storage -The Aptos devnet is currently reset on a weekly basis. 
Hence we estimate that if you are connecting to the devnet, then the Aptos blockchain will not require more than several GBs of storage. See the `#devnet-release` channel on Aptos Discord. -::: - -## Configuring a public fullnode - -You can configure a public fullnode in one of two ways: - -1. Building and running [aptos-core](https://github.com/aptos-labs/aptos-core) from source code. -2. Using Docker. - -This document describes how to configure your public fullnode using both methods. - -### Method 1: Building and running from source - -See [Building Aptos From Source](../../guides/building-from-source.md) - -1. Check out the `mainnet` branch using `git checkout --track origin/mainnet`; remember, you may instead use `devnet` or `testnet`. - -1. Make sure your current working directory is `aptos-core`. - - Run: - ```bash - cp config/src/config/test_data/public_full_node.yaml fullnode.yaml - ``` - to create a copy of the public fullnode configuration YAML template. You will edit this file to ensure that your public fullnode: - - - Contains the correct genesis blob that is published by the Aptos mainnet. - - Synchronizes correctly with the mainnet, by using the checkpoint file `waypoint.txt` published by the mainnet. - - Stores the mainnet database at a location of your choice on your local machine. - -1. Make sure your current working directory is `aptos-core`. The Aptos mainnet publishes the `genesis.blob` and `waypoint.txt` files. Download them: - - - Run the below command on your terminal to download the file: - ```bash - curl -O https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/genesis.blob - ``` - - - Run the below command on your terminal to download the file: - ```bash - curl -O https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/waypoint.txt - ``` - - :::caution Don't want to connect to mainnet? 
- To connect to other networks (e.g., `devnet` and `testnet`), you can find genesis and waypoint here ➜ https://github.com/aptos-labs/aptos-networks. - Be sure to download the `genesis.blob` and `waypoint.txt` for those networks, instead of using the genesis - and waypoint pointed to by the `curl` commands above. - ::: - -1. Edit the `fullnode.yaml` file in your current working directory as follows. - - - Specify the correct path to the `waypoint.txt` you just downloaded by editing the `base.waypoint.from_file` in the `fullnode.yaml`. By default it points to `waypoint.txt` in the current working directory. - - For example: - ```yaml - base: - waypoint: - from_file: "./waypoint.txt" - ``` - - - For the `genesis_file_location` key, provide the full path to the `genesis.blob` file. For example: - - ```yaml - genesis_file_location: "./genesis.blob" - ``` - - - For the `data_dir` key in the `base` list, specify the directory where on your local computer you want to store the devnet database. This can be anywhere on your computer. For example, you can create a directory `my-full-node/data` in your home directory and specify it as: - - ```yaml - data_dir: "" - ``` - -1. Start your local public fullnode by running the below command: - - ```bash - cargo run -p aptos-node --release -- -f ./fullnode.yaml - ``` - -You have now successfully configured and started running a fullnode connected to Aptos devnet. - -:::tip Debugging? -This will build a release binary: `aptos-core/target/release/aptos-node`. The release binaries tend to be substantially faster than debug binaries but lack debugging information useful for development. To build a debug binary, omit the `--release` flag. - -You can also run this directly as `./aptos-core/target/release/aptos-node -f ./fullnode.yaml` after running `cargo build -p aptos-node --release` -::: - ---- - -### Method 2: Using Docker - -This section describes how to configure and run your public fullnode using Docker. 
- -:::danger Supported only on x86-64 CPUs -Running Aptos-core via Docker is currently only supported on x86-64 CPUs. If you have an Apple M1/M2 (ARM64) Mac, use the Aptos-core source approach. If M1/M2 support is important to you, comment on this issue: https://github.com/aptos-labs/aptos-core/issues/1412 -::: - -1. Install [Docker](https://docs.docker.com/get-docker/). -2. Run the following script to prepare your local configuration and data directory for mainnet: -```bash -mkdir mainnet && cd mainnet -mkdir data && \ -curl -O https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/fullnode.yaml && \ -curl -O https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/waypoint.txt && \ -curl -O https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/genesis.blob -``` - -3. Make sure that the `fullnode.yaml` configuration file that you downloaded contains only the following configuration content. This will ensure that this configuration is for public fullnode and not for either a validator node or a validator fullnode: - -```yaml -base: - role: "full_node" - data_dir: "/opt/aptos/data" - waypoint: - from_file: "/opt/aptos/etc/waypoint.txt" - -execution: - genesis_file_location: "/opt/aptos/etc/genesis.blob" - -full_node_networks: -- network_id: "public" - discovery_method: "onchain" - listen_address: "/ip4/0.0.0.0/tcp/6182" - -api: - enabled: true - address: "0.0.0.0:8080" -``` - -**NOTE**: Set `listen_address: "/ip4/127.0.0.1/tcp/6182"` if you do not want other full nodes connecting to yours. Also see the below note. - -4. Run the below `docker` command. **NOTE** the `mainnet` tag always refers to the latest official Docker image tag. 
You can find the latest hash for comparison at: -https://github.com/aptos-labs/aptos-networks/tree/main/mainnet - -```bash -docker run --pull=always \ - --rm -p 8080:8080 \ - -p 9101:9101 -p 6180:6180 \ - -v $(pwd):/opt/aptos/etc -v $(pwd)/data:/opt/aptos/data \ - --workdir /opt/aptos/etc \ - --name=aptos-fullnode aptoslabs/validator:mainnet aptos-node \ - -f /opt/aptos/etc/fullnode.yaml -``` - -**NOTE**: You may need to prefix the command with `sudo` depending on your configuration - -**NOTE**: Ensure you have opened the relevant ports: 8080, 9101 and 6180. You may also need to update the 127.0.0.1 with 0.0.0.0 in the `fullnode.yaml` for the fields `listen_address` and `address` field in the `api` list. - -:::caution Don't want to connect to mainnet? -To connect to other networks (e.g., `devnet` and `testnet`), you can find genesis and waypoint here ➜ https://github.com/aptos-labs/aptos-networks. -Be sure to download the `genesis.blob` and `waypoint.txt` for those networks, instead of using the genesis -and waypoint pointed to by the `curl` commands above. -::: - -Ensure you have opened the relevant ports: 8080, 9101 and 6180. You may also need to update the 127.0.0.1 with 0.0.0.0 in the `fullnode.yaml` for the fields `listen_address` and `address` field in the `api` list. - -## Verify the correctness of your public fullnode - -### Verify initial synchronization - -During the initial synchronization of your public fullnode, there may be a lot of data to transfer. You can monitor the progress by querying the metrics port to see what version your node is currently synced to. Run the following command to see the current synced version of your node: - -```bash -curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_state_sync_version{.*\"synced\"}" | awk '{print $2}' -``` - -The command will output the current synced version of your node. 
For example: - -```bash -71000 -``` - -Compare the synced version returned by this command (e.g., `71000`) with the highest version shown on the -[Aptos explorer page](https://explorer.aptoslabs.com/?network=mainnet). If your node is catching up to the highest version, it is synchronizing correctly. - -:::tip -It is fine if the explorer page differs by a few versions, as the explorer nodes may sync with some variance. -::: - -### (Optional) Verify outbound network connections - -Optionally, you can check the outbound network connections. The number of outbound network connections should be more than `0`. Run the following command: - -```bash -curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_connections{direction=\"outbound\"" -``` - -The above command will output the number of outbound network connections for your node. For example: - -```bash -curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_connections{direction=\"outbound\"" -aptos_connections{direction="outbound",network_id="Public",peer_id="aabd651f",role_type="full_node"} 3 -``` - -If the number of outbound connections returned is `0`, then it means your node cannot connect to the Aptos blockchain. If this happens to you, follow these steps to resolve the issue: - -1. Update your node to the latest release by following the [Update Fullnode With New Devnet Releases](./update-fullnode-with-new-releases.md). -2. Remove any `seed` peers you may have added to your `public_full_node.yaml` configuration file. The seeds may be preventing you from connecting to the network. Seed peers are discussed in the [Connecting your fullnode to seed peers](./fullnode-network-connections.md#connecting-your-fullnode-to-seed-peers) section. - -### (Optional) Examine Docker ledger size - -The blockchain ledger's volume for Aptos devnet can be monitored by entering the Docker container ID and checking the size. -This will allow you to see how much storage the blockchain ledger is currently consuming. 
- -- First, run `docker container ls` on your terminal and copy the NAME field output. This will be a string similar to `public_full_node_fullnode_1`. -- Next, run these commands to check the storage size consumed by the ledger, using the NAME field you copied over in place of `public_full_node_fullnode_1`: - -```bash -# Obtain the container ID: -id=$(docker container ls | grep public_full_node_fullnode_1 | grep -oE "^[0-9a-zA-Z]+") -# Enter the container: -docker exec -it $id /bin/bash -# Observe the volume (ledger) size: -du -cs -BM /opt/aptos/data -``` - -[rest_spec]: https://github.com/aptos-labs/aptos-core/tree/main/api -[devnet_genesis]: https://devnet.aptoslabs.com/genesis.blob -[devnet_waypoint]: https://devnet.aptoslabs.com/waypoint.txt -[aptos-labs/aptos-core]: https://github.com/aptos-labs/aptos-core.git -[status dashboard]: https://status.devnet.aptos.dev - -## Upgrade your public fullnode - -When receiving an update from Aptos for your fullnode, take these measures to minimize downtime. In all cases, you are essentially undoing setup and restarting anew. So first make sure your development environment is up to date. - -### Upgrading from source - -If you created your Aptos fullnode from source, you should similarly upgrade from source: -1. Stop your local public fullnode (for example, press `Ctrl+C` in the terminal where `aptos-node` is running, or kill the `aptos-node` process). -1. Delete the `waypoint.txt`, `genesis.blob` and `fullnode.yaml` files previously downloaded, installed and configured. -1. Re-install and configure those files as during setup. -1. Restart your local public fullnode by running the same start (`run`) command as before: - ```bash - cargo run -p aptos-node --release -- -f ./fullnode.yaml - ``` - -### Upgrading with Docker - -If you created your Aptos fullnode with Docker, you should similarly upgrade with Docker: -1. Stop your local public fullnode by running the below command: - ```bash - docker-compose down --volumes - ``` -1. 
Delete the `waypoint.txt`, `genesis.blob` and `fullnode.yaml` files previously downloaded, installed and configured. -1. Re-install and configure those files as during setup. -1. Restart your local public fullnode by running the same start (`run`) command as before: - ```bash - docker run --pull=always \ - --rm -p 8080:8080 \ - -p 9101:9101 -p 6180:6180 \ - -v $(pwd):/opt/aptos/etc -v $(pwd)/data:/opt/aptos/data \ - --workdir /opt/aptos/etc \ - --name=aptos-fullnode aptoslabs/validator:mainnet aptos-node \ - -f /opt/aptos/etc/fullnode.yaml - ``` diff --git a/developer-docs-site/docs/nodes/full-node/index.md b/developer-docs-site/docs/nodes/full-node/index.md deleted file mode 100644 index 7027ac26dba6e..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/index.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: "Run a Fullnode" -slug: "public-fullnode" ---- - -# Run a Fullnode - -This section explains how to install a public fullnode to connect to the Aptos networks. See also the [external resources](../../community/external-resources.md) offered by your fellow node operators. - -:::tip Default connection to devnet -If you follow the default setup in this document, then your public fullnode will be connected to the Aptos devnet with a REST endpoint accessible on your computer at localhost:8080. - -To connect to a different Aptos network, such as devnet or testnet, make sure you have the correct version of: -- Docker image tag (e.g. devnet, testnet) -- Source code branch (if you build the binary directly) - -You can find genesis and waypoint here ➜ https://github.com/aptos-labs/aptos-networks. 
-::: - - -- ### [Fullnode Using Aptos Source or Docker](fullnode-source-code-or-docker.md) -- ### [Update Fullnode With New Releases](update-fullnode-with-new-releases.md) -- ### [Network Identity For Fullnode](network-identity-fullnode.md) -- ### [Fullnode Network Connections](fullnode-network-connections.md) -- ### [Run a Fullnode on GCP](run-a-fullnode-on-gcp.md) -- ### [Bootstrap a New Fullnode](bootstrap-fullnode.md) \ No newline at end of file diff --git a/developer-docs-site/docs/nodes/full-node/network-identity-fullnode.md b/developer-docs-site/docs/nodes/full-node/network-identity-fullnode.md deleted file mode 100644 index 91abf0ed6f716..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/network-identity-fullnode.md +++ /dev/null @@ -1,199 +0,0 @@ ---- -title: "Network Identity For Fullnode" -slug: "network-identity-fullnode" ---- - -# Network Identity For Fullnode - -Fullnodes will automatically start up with a randomly generated network identity. This works well for regular fullnodes. However: - -- You may want your fullnode to be added to a specific upstream fullnode's allowlist (i.e., another fullnode participant in the Aptos network), because: - - - You might require specific permissions for your fullnode on this specific upstream fullnode, or - - This upstream fullnode only allows known identities to connect to it, or - - You may wish to advertise your fullnode for other Aptos fullnodes to connect to (to help support the Aptos network). - -In such cases, it helps if you run your fullnode with a static network identity, instead of a randomly generated network identity that keeps changing every time you start up your fullnode. - -This guide will show you how to: - -- Create a static network identity for your fullnode. -- Start a node with a static network identity. - -## Before you proceed - -Before you proceed, make sure that you already know how to start your local fullnode. See [Run a Fullnode](./index.md) for detailed documentation. 
- -:::caution Docker support only on Linux - -Docker container is currently supported only on Linux x86-64 platform. If you are on macOS or Windows platform, use the Aptos-core source approach. - -::: - -## Creating a static identity for a fullnode - -To create a static identity for your fullnode: - -1. You first create a private key, public key pair for your fullnode. -2. Next you derive the `peer_id` from the public key. -3. Finally, you use the `peer_id` in your `fullnode.yaml` to create a static network identity for your fullnode. - -Follow the below detailed steps: - -1. Preparation - - **Using Aptos-core source code** - - See [Building Aptos From Source](../../guides/building-from-source.md) - - **Using Docker** - - Alternatively, if you are on Linux x86-64 platform, you can use the Aptos Docker image. - - `cd` into the directory for your local public fullnode and start a Docker container with the latest tools, for example: - - ```bash - cd ~/my-full-node - docker run -it aptoslabs/tools:devnet /bin/bash - ``` - -2. Generate the private key - - **Using Aptos-core source code** - - Run the [Aptos CLI](../../tools/aptos-cli/use-cli/use-aptos-cli.md) `aptos` to produce a hex encoded static x25519 private key. This will be the private key for your network identity. - - :::tip - - The below command will also create a corresponding `private-key.txt.pub` file with the public identity key in it. - - ::: - - ```bash - aptos key generate --key-type x25519 --output-file /path/to/private-key.txt - - ``` - - Example `private-key.txt` and the associated `private-key.txt.pub` files are shown below: - - ```bash - cat ~/private-key.txt - C83110913CBE4583F820FABEB7514293624E46862FAE1FD339B923F0CACC647D% - - cat ~/private-key.txt.pub - B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813% - ``` - - **Using Docker** - - Run this step from inside the `aptoslabs/tools` Docker container. 
Open a new terminal and `cd` into the directory where you started the Docker container for your fullnode. Making sure to provide the full path to where you want the private key TXT file to be stored, run the command as below: - - ```bash - aptos key generate \ - --key-type x25519 \ - --output-file /path/to/private-key.txt - ``` - -3. Retrieve the peer identity - - **Using Aptos-core source code** - - :::tip Required: host information - Use the `--host` flag to provide the host information to output a network address for the fullnode. - ::: - - ```bash - aptos key extract-peer --host example.com:6180 \ - --public-network-key-file private-key.txt.pub \ - --output-file peer-info.yaml - ``` - which will produce the following output: - ```json - { - "Result": { - "B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813": { - "addresses": [ - "/dns/example.com/tcp/6180/noise-ik/0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813/handshake/0" - ], - "keys": [ - "0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813" - ], - "role": "Upstream" - } - } - } - ``` - or - ```bash - aptos key extract-peer --host 1.1.1.1:6180 \ - --public-network-key-file private-key.txt.pub \ - --output-file peer-info.yaml - ``` - which will produce the following output: - ```json - { - "Result": { - "B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813": { - "addresses": [ - "/ip4/1.1.1.1/tcp/6180/noise-ik/0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813/handshake/0" - ], - "keys": [ - "0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813" - ], - "role": "Upstream" - } - } - } - ``` - - **Using Docker** - - Run the same above commands to extract the peer from inside the `aptoslabs/tools` Docker container. 
For example: - - ```bash - aptos key extract-peer --host 1.1.1.1:6180 \ - --public-network-key-file /path/to/private-key.txt.pub \ - --output-file /path/to/peer-info.yaml - ``` - - This will create a YAML file that will have your `peer_id` corresponding to the `private-key.txt` you provided. - - Example output `peer-info.yaml` for the `--host example.com:6180` option: - - ```yaml - --- - B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813: - addresses: ["/dns/example.com/tcp/6180/noise-ik/0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813/handshake/0"] - keys: - - "0xB881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813" - role: Upstream - ``` - - In this example, `B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813` is the `peer_id`. Use this in the `peer_id` field of your `fullnode.yaml` to create a static identity for your fullnode. - - -## Start a node with a static network identity - -After you generated the public identity key you can startup the fullnode with a static network identity by using the public key in the `peer_id` field of the configuration file `fullnode.yaml`: - -```yaml -full_node_networks: -- network_id: "public" - discovery_method: "onchain" - identity: - type: "from_config" - key: "" - peer_id: "" -``` - -In our example, you would specify the above-generated `peer_id` in place of the ``: - -```yaml -full_node_networks: -- network_id: "public" - discovery_method: "onchain" - identity: - type: "from_config" - key: "C83110913CBE4583F820FABEB7514293624E46862FAE1FD339B923F0CACC647D" - peer_id: "B881EA2C174D8211C123E5A91D86227DB116A44BB345A6E66874F83D8993F813" -``` diff --git a/developer-docs-site/docs/nodes/full-node/run-a-fullnode-on-gcp.md b/developer-docs-site/docs/nodes/full-node/run-a-fullnode-on-gcp.md deleted file mode 100755 index c6b0dee18a5f4..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/run-a-fullnode-on-gcp.md +++ /dev/null @@ -1,373 +0,0 @@ ---- -title: "Run a 
Public Fullnode on GCP" -slug: "run-a-fullnode-on-gcp" ---- - -# Run a Public Fullnode on GCP - -This tutorial explains how to configure and deploy a public fullnode to connect to the Aptos devnet using Google Cloud (GCP). Running a public fullnode in the cloud usually provides better stability and availability compared to running it on your laptop. **If you are looking to deploy a production-grade public fullnode, we recommend that you deploy it in the cloud.** - -:::tip Alternative methods for running a public fullnode -Read [Public Fullnode](./index.md) if you want other options for deployment. Using the cloud comes with a cost, and it varies depending on how you configure it. -::: - -## Prerequisites - -You can run the commands in this guide to deploy your fullnode on Google Kubernetes Engine from any machine you want: from a [VM on GCP](https://cloud.google.com/compute), or [Google Cloud Shell](https://cloud.google.com/shell), or your personal computer. - -The following packages are pre-installed with Cloud Shell. **Make sure to review** the [documentation around ephemeral mode](https://cloud.google.com/shell/docs/using-cloud-shell/#choose_ephemeral_mode) if you choose to use Cloud Shell. - -However, if you are running the installation from your laptop or another machine, you need to install: - -* Terraform 1.3.6: https://www.terraform.io/downloads.html -* Kubernetes cli: https://kubernetes.io/docs/tasks/tools/ -* Google Cloud cli: https://cloud.google.com/sdk/docs/install-sdk - -After you have installed the gcloud CLI, [log into GCP using gcloud](https://cloud.google.com/sdk/gcloud/reference/auth/login): - -```bash -gcloud auth login --update-adc -``` - -:::tip Already have a GCP account set up? -If you already have a GCP account set up, skip to [Getting Started](#getting-started). If you do not have a GCP account, then follow the below sections to create and configure your GCP account. 
-::: - -### GCP setup - -#### Sign up for the 90-day free trial - -Google Cloud offers a [90 day $300 free trial for every new user](https://cloud.google.com/free/docs/gcp-free-tier/#free-trial). These $300 are given as credits to your account and you can use them to get a sense of Google Cloud products. Be aware that you will need to add payment information when signing up for the free trial. This is for identity verification purposes and [will not incur charges until you upgrade to a paid account and run out of credits](https://cloud.google.com/free/docs/gcp-free-tier/#:~:text=Don%27t%20worry%2C%20setting,90%2Dday%20period).). Some GCP feature such as GPUs and Windows servers are not available in the free trial. - -:::tip Sign up for GCP $300 credits -[Sign up for the $300 in credits here.](https://cloud.google.com/free) -::: - -#### Create a new GCP project - -- Create a new project on the GCP Console or using the gcloud command from the Google Cloud CLI. Before you do that, familiarize yourself with the [resource hierarchy on GCP](https://cloud.google.com/resource-manager/docs/cloud-platform-resource-hierarchy). -- [Follow these instructions to setup a new project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#creating_a_project). - -#### Enable billing, upgrade your account - -You will still be able to use the free trial credits, but enabling billing allows you to have full access to all the features of GCP and not experience any interruption to your nodes. - -[Upgrade your account by following the steps outlined here.](https://cloud.google.com/free/docs/gcp-free-tier#how-to-upgrade) - -#### Additional GCP resources - -This should be enough to get your GCP setup ready to start deploying your fullnode. But if you are brand new to GCP, you may want to check out some of our [quickstart guides](https://cloud.google.com/docs/get-started/quickstarts) and [Google Cloud Skills Boost](https://www.cloudskillsboost.google/catalog). 
- - -## Getting started - -:::tip Before you proceed -From here on, this guide assumes that you have already set up your GCP account, and have created a new project for deploying Aptos public fullnode. If you don't, check out the instructions above for [GCP Setup](#gcp-setup). -::: - -You can deploy a public fullnode on GCP by using the Aptos fullnode Terraform module. - -1. Create a working directory for your configuration. - - * Choose a workspace name e.g. `devnet`. **Note**: This defines the Terraform workspace name, which in turn is used to form resource names. - ```bash - export WORKSPACE=devnet - ``` - - * Create a directory for the workspace - ```bash - mkdir -p ~/$WORKSPACE - ``` - -2. Create a storage bucket for storing the Terraform state on Google Cloud Storage. Use the console or this gcs command to create the bucket. The name of the bucket must be unique. See the Google Cloud Storage documentation here: https://cloud.google.com/storage/docs/creating-buckets#prereq-cli. - - ```bash - gsutil mb gs://BUCKET_NAME - # for example - gsutil mb gs://-aptos-terraform-dev - ``` - -3. Create Terraform file called `main.tf` in your working directory: - ```bash - cd ~/$WORKSPACE - touch main.tf - ``` - -4. Modify the `main.tf` file to configure Terraform and create a public fullnode from the Terraform module. - -**Note:** If you are using a different version of Terraform, you will need to use the `tfenv` command to change the required version. 
- -You can find the Docker image tag at https://hub.docker.com/r/aptoslabs/validator/tags?page=1&ordering=last_updated&name=devnet - -Example content for `main.tf`: - - ```rust - terraform { - required_version = "~> 1.3.6" - backend "gcs" { - bucket = "BUCKET_NAME" # bucket name created in step 2 - prefix = "state/fullnode" - } - } - - module "fullnode" { - # download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/fullnode/gcp?ref=main" - region = "us-central1" # Specify the region - zone = "c" # Specify the zone suffix - project = "gcp-fullnode" # Specify your GCP project ID - fullnode_name = "BUCKET_NAME" #bucket name created in step 2 - era = 1 # bump era number to wipe the chain - image_tag = "devnet" # Specify the docker image tag - } - ``` - -5. Initialize Terraform in the same directory of your `main.tf` file: - - ```bash - terraform init - ``` - -This will download all the Terraform dependencies into the `.terraform` folder. - -6. Create a new Terraform workspace to isolate your environments: - - ```bash - terraform workspace new $WORKSPACE - # This command will list all workspaces - terraform workspace list - ``` - -7. Apply the configuration: - - ```bash - terraform apply - ``` - - This might take a while to finish (10 - 20 minutes), Terraform will create all the resources on your cloud account. - -## Validation - -Once Terraform apply finished, you can follow this section to validate your deployment. - -1. Configure your Kubernetes client to access the cluster you just deployed: - - ```bash - gcloud container clusters get-credentials aptos-$WORKSPACE --zone --project - # for example: - gcloud container clusters get-credentials aptos-devnet --zone us-central1-a --project aptos-fullnode - ``` - -2. 
Check that your public fullnode pods are now running (this may take a few minutes): - - ```bash - kubectl get pods -n aptos - ``` -You should see this: - -``` -NAME READY STATUS RESTARTS AGE -devnet0-aptos-fullnode-0 1/1 Running 0 56s -``` - -3. Get your public fullnode IP: - - ```bash - kubectl get svc -o custom-columns=IP:status.loadBalancer.ingress -n aptos - ``` - - You should see this: - - ```IP -[map[ip:104.198.36.142]] -``` - -4. Check the REST API, make sure that the ledger version is increasing: - - ```bash - curl http:///v1 - # Example command syntax: curl http://104.198.36.142/v1 - ``` - - You should see this: - ``` - {"chain_id":25,"epoch":"22","ledger_version":"9019844","oldest_ledger_version":"0","ledger_timestamp":"1661620200131348","node_role":"full_node","oldest_block_height":"0","block_height":"1825467"} -``` - -5. To verify the correctness of your public fullnode, as outlined in the section [Verify the correctness of your FullNode](./fullnode-source-code-or-docker.md#verify-the-correctness-of-your-public-fullnode), you will need to: - - Set up a port-forwarding mechanism directly to the aptos pod in one ssh terminal, and - - Test it in another ssh terminal. - - Follow the below steps: - - * Set up the port-forwarding to the aptos-fullnode pod. Use `kubectl get pods -n aptos` to get the name of the pod: - - ```bash - kubectl port-forward -n aptos 9101:9101 - # for example: - kubectl port-forward -n aptos devnet0-aptos-fullnode-0 9101:9101 - ``` - - * Open a new ssh terminal. Execute the following curl calls to verify the correctness: - - ```bash - curl -v http://0:9101/metrics 2> /dev/null | grep "aptos_state_sync_version{type=\"synced\"}" - - curl -v http://0:9101/metrics 2> /dev/null | grep "aptos_connections{direction=\"outbound\"" - ``` - - * Exit port-forwarding when you are done by entering control-c in the terminal. 
- - -## Configure identity and seed peers - -### Static identity - -If you want to configure your node with a static identity, first see the [Network Identity For Fullnode](./network-identity-fullnode.md) for how to generate the keys, and then follow the below instructions to configure your Terraform file. - -1. Generate your own private key, and extract peer id, following the guide [Creating a static identity for a fullnode](./network-identity-fullnode.md#creating-a-static-identity-for-a-fullnode). - -2. Modify the `main.tf` to add `fullnode_identity` in `fullnode_helm_values`. This will configure the keys for public fullnode, for example: - - ```rust - module "fullnode" { - # download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/fullnode/gcp?ref=main" - region = "us-central1" # Specify the region - zone = "c" # Specify the zone suffix - project = "gcp-fullnode" # Specify your GCP project name - era = 1 # bump era number to wipe the chain - image_tag = "devnet" # Specify the docker image tag to use - - fullnode_helm_values = { - chain = { - name = "devnet" - } - # create fullnode from this identity config, so it will always have same peer id and address - fullnode_identity = { - type = "from_config" - key = "B8BD811A91D8E6E0C6DAC991009F189337378760B55F3AD05580235325615C74" - peer_id = "ca3579457555c80fc7bb39964eb298c414fd60f81a2f8eedb0244ec07a26e575" - } - } - } - ``` - -3. Apply Terraform changes: - - ```bash - terraform apply - ``` - -### Add upstream seed peers - -You can add upstream seed peers to allow your node to state sync from a specific set of peers. This is helpful when the public fullnode is not able to connect to the network due to congestion. - -1. Obtain the upstream peer id information. 
You can either use the one listed in the [Connecting your fullnode to seed peers](./fullnode-network-connections.md#connecting-your-fullnode-to-seed-peers), or grab one from the [Aptos Discord](https://discord.gg/aptosnetwork) [#advertise-full-node](https://discord.com/channels/945856774056083548/956342147546746901) channel; these are the nodes hosted by our community. - -2. Modify the `main.tf` to add seeds for devnet in `fullnode_helm_values`. This will configure the upstream seeds for public fullnode. For example: - -```rust -module "fullnode" { - # download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/fullnode/gcp?ref=main" - region = "us-central1" # Specify the region - zone = "c" # Specify the zone suffix - project = "gcp-fullnode" # Specify your GCP project name - era = 1 # bump era number to wipe the chain - image_tag = "dev_5b525691" # Specify the docker image tag to use - - fullnode_helm_values = { - # add a list of peers as upstream - aptos_chains = { - devnet = { - seeds = { - "bb14af025d226288a3488b4433cf5cb54d6a710365a2d95ac6ffbd9b9198a86a" = { - addresses = ["/dns4/pfn0.node.devnet.aptoslabs.com/tcp/6182/noise-ik/bb14af025d226288a3488b4433cf5cb54d6a710365a2d95ac6ffbd9b9198a86a/handshake/0"] - role = "Upstream" - }, - "7fe8523388084607cdf78ff40e3e717652173b436ae1809df4a5fcfc67f8fc61" = { - addresses = ["/dns4/pfn1.node.devnet.aptoslabs.com/tcp/6182/noise-ik/7fe8523388084607cdf78ff40e3e717652173b436ae1809df4a5fcfc67f8fc61/handshake/0"] - role = "Upstream" - }, - "f6b135a59591677afc98168791551a0a476222516fdc55869d2b649c614d965b" = { - addresses = ["/dns4/pfn2.node.devnet.aptoslabs.com/tcp/6182/noise-ik/f6b135a59591677afc98168791551a0a476222516fdc55869d2b649c614d965b/handshake/0"] - role = "Upstream" - } - } - } - } - } - } -``` - -3. 
Apply Terraform changes: - - ```bash - terraform apply - ``` - -## Check logging - -To check the logs of the pod, use the following commands: - - ```bash - # Get a list of the pods - kubectl get pods -n aptos - - # Get logs of the pod - kubectl logs -n aptos - # for example: - kubectl logs devnet0-aptos-fullnode-0 -n aptos - ``` - -When using GKE, the logs of the cluster and pod will automatically show up in the Google Cloud console. From the console menu, choose `Kubernetes Engine`. From the side menu, choose `Workloads`. You will see all the pods from the cluster listed. - - -![GKE Workloads screenshot](../../../static/img/tutorial-gcp-logging1.png "GKE Workloads screenshot") - - -The `devnet0-aptos-fullnode` is the pod that is running the aptos fullnode container. Click on the pod to view details. You will see some metrics and other details about the pod. - - -![GKE Workloads Pod screenshot](../../../static/img/tutorial-gcp-logging2.png "GKE Workloads Pod screenshot") - - -Click the `LOGS` tab to view the logs directly from the pod. If there are errors in the pod, you will see them here. - - -![GKE Workloads Pod Logs screenshot](../../../static/img/tutorial-gcp-logging3.png "GKE Workloads Pod Logs screenshot") - - -Click the `open in new window` icon to view the logs in the Log Explorer. This screen allows advanced searching in the logs. - - -![GKE Workloads Pod Logs Explorer screenshot](../../../static/img/tutorial-gcp-logging4.png "GKE Workloads Pod Logs Explorer screenshot") - - - -Other logging insights are available in the Logs Dashboard - - -![GKE Workloads Pod Logs Dashboard screenshot](../../../static/img/tutorial-gcp-logging5.png "GKE Workloads Pod Logs Dashboard screenshot") - - - -Additional [features](https://cloud.google.com/logging/docs) are available through [Cloud Logging](https://cloud.google.com/logging), including creating log-based metrics, logging sinks and log buckets. 
- - -## Check monitoring - -Google cloud captures many metrics from the cluster and makes them easily viewable in the console. From the console menu, choose `Kubernetes Engine`. Click on the cluster that aptos is deployed to. Click on the `Operations` link at the top right. Click on the `Metrics` sub-tab to view specific cluster metrics. - - -![GKE Monitoring metrics screenshot](../../../static/img/tutorial-gcp-mon1.png "GKE Monitoring metrics screenshot") - - -Click the `View in Cloud Monitoring` link at the top to view the built-in GKE [dashboard](https://cloud.google.com/stackdriver/docs/solutions/gke/observing) for the cluster. - - -![GKE Monitoring dashboard screenshot](../../../static/img/tutorial-gcp-mon2.png "GKE Monitoring dashboard screenshot") - - -Google Cloud [Monitoring](https://cloud.google.com/monitoring) has many other features to easily monitor the cluster and pods. You can configure [uptime checks](https://cloud.google.com/monitoring/uptime-checks/introduction) for the services and configure [alerts](https://cloud.google.com/monitoring/alerts/using-alerting-ui) for when the metrics reach a certain [threshold](https://cloud.google.com/stackdriver/docs/solutions/slo-monitoring/sli-metrics/overview). diff --git a/developer-docs-site/docs/nodes/full-node/update-fullnode-with-new-releases.md b/developer-docs-site/docs/nodes/full-node/update-fullnode-with-new-releases.md deleted file mode 100644 index decbed2361518..0000000000000 --- a/developer-docs-site/docs/nodes/full-node/update-fullnode-with-new-releases.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: "Update Fullnode With New Devnet Releases" -slug: "update-fullnode-with-new-devnet-releases" -sidebar_position: 11 ---- - -# Update Fullnode With New Releases - -When `devnet` is wiped and updated with newer versions, you will need to update your fullnode as well. If you do not, your fullnode will not continue to synchronize with the network. 
To update your fullnode, follow these steps: - -## If you built the fullnode from aptos-core source code - -1. Shutdown your fullnode. - -2. Delete the data folder (the directory path is what you specified in the configuration file, e.g., `fullnode.yaml`). - - - The default data folder is `/opt/aptos/data`. - -3. Delete the `genesis.blob` file and `waypoint.txt` file (depending on how you configured it, you might not have this file and may instead have a `waypoint` directly in your configuration file). - -4. If you use the Rust binary, pull the latest of `devnet` via `git checkout devnet && git pull`, and build the binary again. - -5. Download the new [genesis.blob](../node-files-all-networks/node-files.md#genesisblob) file and the new [waypoint](../node-files-all-networks/node-files.md#waypointtxt). - -6. Update the configuration file (e.g., `fullnode.yaml`) with the new waypoint (if you configure the waypoint directly there). - -7. Restart the fullnode. - -8. See the [Verify initial synchronization](./fullnode-source-code-or-docker.md#verify-initial-synchronization) section for checking if the fullnode is syncing again. - -## If you run a fullnode via Docker - -1. Shutdown your fullnode -2. Delete the entire directory which holds your fullnode config and data directory. -3. Rerun the instructions on [Approach #2: Using Docker](./fullnode-source-code-or-docker.md#Approach-#2:-Using-Docker) - -## If you run a fullnode on GCP - -Aptos devnet releases can be of two types: -- One with a data wipe to start over the Aptos blockchain -- Second type is only a software update without a data wipe - -### Upgrade with data wipe - -1. You can increase the `era` number in `main.tf` to trigger a new data volume creation, which will start the node on a new DB. - -2. Update `image_tag` in `main.tf`. - -3. Update Terraform module for fullnode, run this in the same directory of your `main.tf` file: - - ```bash - terraform get -update - ``` - -4. 
Apply Terraform changes: - - ```bash - terraform apply - ``` - -### Upgrade without data wipe - -1. Update `image_tag` in `main.tf`. - -2. Update Terraform module for fullnode, run this in the same directory of your `main.tf` file: - - ```bash - terraform get -update - ``` - -3. Apply Terraform changes: - - ```bash - terraform apply - # if you didn't update the image tag, terraform will show nothing to change, in this case, force helm update - terraform apply -var force_helm_update=true - ``` - -[rest_spec]: https://github.com/aptos-labs/aptos-core/tree/main/api -[devnet_genesis]: https://devnet.aptoslabs.com/genesis.blob -[devnet_waypoint]: https://devnet.aptoslabs.com/waypoint.txt -[aptos-labs/aptos-core]: https://github.com/aptos-labs/aptos-core.git -[status dashboard]: https://status.devnet.aptos.dev diff --git a/developer-docs-site/docs/nodes/identity-and-configuration.md b/developer-docs-site/docs/nodes/identity-and-configuration.md deleted file mode 100644 index 8ded5760d3d03..0000000000000 --- a/developer-docs-site/docs/nodes/identity-and-configuration.md +++ /dev/null @@ -1,199 +0,0 @@ ---- -title: "Identity and Configuration" -slug: "identity-and-configuration" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - - -# Node Identity and Configuration - -When installing a node on an Aptos network, the installation steps require you to work with identities and configurations. This document describes how to interpret the terms **identity** and **configuration**, and presents a description of the identity YAML files. - -## Concept - -This section presents a mental-model view of an identity and configuration. It is meant to help make the node installation process easy. - -The terms **identity** and **configuration** should be understood in the following way: - -- The terms **validator node**, **fullnode**, and **validator fullnode** refer to the machine (physical or virtual). 
-- The terms **operator**, **owner** and **voter** refer to the persona. -- A machine has both an identity and a configuration. They are defined in separate YAML files. A persona's identity and configuration are combined into a single YAML file. - -### Machine - -#### Identity - -Machine **identity** is defined in a YAML file. An identity is established by means of keys. A machine identity YAML contains only private keys. Moreover, an identity YAML always contains the associated blockchain account address. - -A machine identity YAML has the string `identity` in its name. For example: - -- validator-**identity**.yaml contains the private keys for the validator node. -- validator-full-node-**identity**.yaml contains the private keys for validator fullnode and public fullnode. - -Hence if you are looking for your machine’s private keys, look for YAML filenames with `identity` in them. - -#### Configuration - -Machine **configuration** is also defined in a YAML file. A machine configuration YAML **never contains any key, public or private**. For example, the configuration YAMLs validator.yaml, fullnode.yaml, docker-compose.yaml and docker-compose-fullnode.yaml **do not contain any keys.** - -As noted earlier, a machine has an identity and a configuration. Hence: - -- For a validator node, identity is defined in validator-**identity**.yaml and configuration is in validator.yaml. -- For a validator fullnode, its identity is defined in validator-full-node-**identity**.yaml and its configuration is defined in fullnode.yaml. - -### Persona - -#### Identity and configuration - -A persona has a single YAML that combines the persona’s identity and configuration information. For example, for the three personas, owner, operator and voter: - -- An owner's identity-configuration is defined in **owner.yaml**. 
The owner.yaml contains the public keys and blockchain account addresses for owner, operator and voter, and some owner-specific configuration such as stake amount and commission percentage. -- An operator’s identity-configuration is defined in **operator.yaml**. The operator.yaml contains public keys and blockchain account address for the operator and some machine configuration information plus a consensus public key and consensus proof of possession key. **Only the operator has the consensus keys.** Neither the owner nor the voter has the consensus keys. -- A voter's identity-configuration, i.e., voter.yaml, does not exist. - -## Description of identity YAMLs - -This section explains the following key and identity YAML files that are generated during the deployment of a validator node: - -- `public-keys.yaml`. -- `private-keys.yaml`. -- `validator-identity.yaml`. -- `validator-full-node-identity.yaml`. - -The following command is used to generate the above key and identity YAMLs. See, for example, [Step 10 while using AWS to deploy the validator node](./validator-node/operator/running-validator-node/using-aws.md), or in [Step 10 while using GCP](./validator-node/operator/running-validator-node/using-gcp.md). - -```bash -aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys -``` - -See below a diagram that shows how the identities for the validator node and validator fullnode are derived from the private and public keys: - - - -### public-keys.yaml - -#### Example - -Click below to see an example YAML configuration: -
-public-keys.yaml - -```yaml ---- -account_address: a5a643aa695fc5f34927386c8d767cddcc0607933f40c89a7ad78de7804965b8 -account_public_key: "0x9ccfc50f334064e1b24455029a5bc1646a2c4dd2b1433de1364470692ba6b99b" -consensus_public_key: "0xa7e8334381d9f80d33d70da543aea22c87fe9862ab7df5cbef9ee11b5285b89c56e0e7a3a78c1561833b2d6fa4d9d4bf" -consensus_proof_of_possession: "0xa51dfd1734e581df99c4c637324ee38c3e48e51c61c1e1dd03bd5a84cf1cd5b2fa00e976b9a9ea0e0908f0d53085318c03f24de3ebf86b07ff883effe0142e0d3f24c7c1e36dd198ea4d8eb6f5c5a2f3a188de22720bd1914a9effa6f595de38" -full_node_network_public_key: "0xa6845691a00d6cfdaa9823c4d12b2b5e13d2ecfdc3049d0f2838c805bfd01633" -validator_network_public_key: "0x71f2642aeaa6cbfacf75663cf14d2f6e9e1bd890f9bc1c96900fd225cce01836" -``` - -
- -#### Description - -| public-keys.yaml | Description | -| --- | --- | -| account_address |The Aptos blockchain account address for the operator, i.e., the persona who deploys the validator node. | -| account_public_key | The public key associated with the blockchain account. | -| consensus_public_key | Used only by the operator for validation purpose. | -| consensus_proof_of_possession | Used only by the operator for validation purpose. | -| full_node_network_public_key | The public key for the fullnode by which a VFN (validator fullnode) or a PFN (public fullnode) is identified in the Aptos network. | -| validator_network_public_key | The public key for the validator node and by which the validator node is identified in the Aptos network. | - -### private-keys.yaml - -#### Example - -Click below to see an example YAML configuration: -
-private-keys.yaml - - -```yaml ---- -account_address: a5a643aa695fc5f34927386c8d767cddcc0607933f40c89a7ad78de7804965b8 -account_private_key: "0x80478d60a52f54a88e7095abf48b1f4294a335b30f1066cd73768b9b789e833f" -consensus_private_key: "0x4aedda33ef3fd71243eb2a926307d8826c95b9939f88e753d62d9bc577e99916" -full_node_network_private_key: "0x689c11c6e5405219b5eae1312086c801e3a044946afc74429e5157b46fb65b61" -validator_network_private_key: "0xa03ec46b24f2f1066d7980dc13b4baf722ba60c367e498e47a657ba0815adb58" -``` - -
- -#### Description - -| private-keys.yaml | Description | -| --- | --- | -| account_address | The Aptos blockchain account address for the operator, i.e., the persona who deploys the validator node. | -| account_private_key | The private key associated with the blockchain account. | -| consensus_private_key | The consensus private key, used only by the operator for validation purpose and for rotating the consensus key.| -| full_node_network_private_key |The private key for the fullnode. Whoever holds this private key will be able to establish the ownership of the VFN and PFN in the Aptos network. | -| validator_network_private_key | The private key for the validator node. Whoever holds this private key will be able to establish the ownership of the validator node in the Aptos network. | - -### validator-identity.yaml - -#### Example - -Click below to see an example YAML configuration: - -
-validator-identity.yaml - - -```yaml ---- -account_address: a5a643aa695fc5f34927386c8d767cddcc0607933f40c89a7ad78de7804965b8 -account_private_key: "0x80478d60a52f54a88e7095abf48b1f4294a335b30f1066cd73768b9b789e833f" -consensus_private_key: "0x4aedda33ef3fd71243eb2a926307d8826c95b9939f88e753d62d9bc577e99916" -network_private_key: "0xa03ec46b24f2f1066d7980dc13b4baf722ba60c367e498e47a657ba0815adb58" -``` - -
- -#### Description - -| validator-identity.yaml | Description | -| --- | --- | -| account_address | The Aptos blockchain account address for the operator, i.e., the persona who deploys the validator node. | -| account_private_key |The private key associated with the blockchain account. | -| consensus_private_key | The consensus private key, used only by the operator for validation purpose and for rotating the consensus key. | -| network_private_key | The private key for the validator node. Whoever holds this private key will be able to establish the ownership of the validator node in the Aptos network. | - - -### validator-full-node-identity.yaml - -#### Example - -Click below to see an example YAML configuration: - -
-validator-full-node-identity.yaml - - -```yaml ---- -account_address: a5a643aa695fc5f34927386c8d767cddcc0607933f40c89a7ad78de7804965b8 -network_private_key: "0x689c11c6e5405219b5eae1312086c801e3a044946afc74429e5157b46fb65b61" -``` - -
- - -#### Description - -| validator-full-node-identity.yaml | Description | -| --- | --- | -| account_address | The Aptos blockchain account address for the operator, i.e., the persona who deploys the validator node. | -| network_private_key | The private key for the fullnode. Whoever holds this private key will be able to establish the ownership of the VFN and PFN in the Aptos network. | - - - diff --git a/developer-docs-site/docs/nodes/leaderboard-metrics.md b/developer-docs-site/docs/nodes/leaderboard-metrics.md deleted file mode 100644 index 975c4c203e0e0..0000000000000 --- a/developer-docs-site/docs/nodes/leaderboard-metrics.md +++ /dev/null @@ -1,56 +0,0 @@ ---- -title: "Leaderboard Metrics" -slug: "leaderboard-metrics" ---- - -# Leaderboard Metrics - -This document explains how the rewards for validator are evaluated and displayed on the [Aptos Validator Status](https://explorer.aptoslabs.com/validators/all?network=mainnet) page. - -## How rewards are calculated - -:::tip Staking documentation -For a backgrounder on staking with explanations of epoch, rewards and how to join and leave validator set, see the [Staking](../concepts/staking.md). -::: - -- An epoch starts with a finalized validator set. During the epoch, only validators in this validator set will vote. -- Epoch value on the mainnet is set by the governance. See [Aptos Blockchain Networks](./networks.md) for epoch values for other Aptos networks. -- During the epoch, following the process described in [Validation on the Aptos blockchain](../concepts/staking.md#validation-on-the-aptos-blockchain), a validator is selected as a leader to make a proposal. Because the validator set is unchanged during the course of an epoch, this will result in a validator being selected multiple times as a leader in an epoch. -- On successful proposals, i.e., proposals achieving the quorum consensus, the leaders earn rewards based on their stake and on the reward rate that is configured on-chain. 
The reward rate is the same for every validator. -- If all the proposals in an epoch achieve quorum consensus, a validator earns the maximum reward for the epoch. **Rewards are given only to the leader validators, and not to the voters.** -- On failed proposals, i.e., a proposal that did not achieve the quorum consensus, the leaders do not earn any reward for that proposal. -- If all the proposals in an epoch fail, a validator earns zero rewards for that epoch. - -### Example - -:::tip Rewards are given only at the end of the epoch -Note that the rewards are given only at the end of the epoch, not on every block. -::: - -The reward a leader receives is calculated by multiplying the maximum possible reward with the leader's proposal success rate. For example: -- A leader with 8 successful and 2 failed proposals will receive 80% of maximum reward. -- Similarly, another leader with 80 successful and 20 failed proposals will also receive 80% of maximum reward. -- Also, two leaders with no failures but one with 10 and another with 100 successful proposals will get the same % of the maximum reward. - -:::tip Reward rate is the same -Note also that the reward rate is the same for every validator. Hence the maximum reward is directly proportional to the staking amount, i.e., the more a validator stakes, the more the validator can earn in absolute terms. -::: - -## Rewards performance - -- The REWARDS PERFORMANCE column shows the rewards performance of a validator. It is calculated as a % of reward earned by the validator out of the maximum reward earning opportunity i.e., `(rewards earned across the epochs)/(maximum reward opportunity across the epochs)`. **This is a cumulative metric across all the epochs.** -- A validator can improve their performance by improving their proposal success rate. - -## Last epoch performance - -The LAST EPOCH PERFORMANCE column shown on the leaderboard is reported as `(number of successful proposals)/(number of total proposal opportunities)`. 
-- This metric gives you an early indicator if your performance is slowly reducing. -- You can see the JSON dump (link on the leaderboard) to see the performance across all the epochs. -- On mouse hover, you can see the last epoch for the validator. - -## Governance votes - -The GOVERNANCE VOTES column shown on the leaderboard is reported as `(governance proposals voted on)/(total governance votes)`. - - - diff --git a/developer-docs-site/docs/nodes/local-testnet/index.md b/developer-docs-site/docs/nodes/local-testnet/index.md deleted file mode 100644 index 6fb99a972c370..0000000000000 --- a/developer-docs-site/docs/nodes/local-testnet/index.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: "Develop with Local Testnet" -slug: "local-testnet-index" ---- - -# Develop with Local Testnet - -## Recommended -Most devs should use the CLI to run a local development network. It is simpler and more fully featured than just a single node local testnet + faucet. If you want a local stack that works just like a production network ([Node API](../aptos-api-spec.md) + [Transaction Stream](../../indexer/txn-stream/index.md) + [Indexer API](../../indexer/api/index.md) + Faucet), this is the option for you. - -- ### [Run a Local Development Network](../../guides/local-development-network.md) - -## Specialized -If you're developing the core Aptos node software itself or have complex testing needs, consider these guides. 
- -- ### [Run a Local Testnet with Validator](./run-a-local-testnet.md) -- ### [Run a Local Multinode Network](../../guides/running-a-local-multi-node-network.md) - diff --git a/developer-docs-site/docs/nodes/local-testnet/run-a-local-testnet.md b/developer-docs-site/docs/nodes/local-testnet/run-a-local-testnet.md deleted file mode 100755 index 4924818fb94bc..0000000000000 --- a/developer-docs-site/docs/nodes/local-testnet/run-a-local-testnet.md +++ /dev/null @@ -1,112 +0,0 @@ ---- -title: "Run a Local Testnet with Validator" -slug: "run-a-local-testnet" -sidebar_position: 9 ---- - -# Run a Local Testnet with Validator - -:::tip Using the CLI to run a local development network -Running a local testnet with the Aptos CLI is simpler and more fully featured. Learn how by following this guide: [Run a Local Development Network with the CLI](../../guides/local-development-network.md). -::: - -You can run a local testnet of the Aptos blockchain. This local testnet will not be connected to the Aptos devnet. It will run on your local machine, independent of other Aptos networks. You can use this local testnet for testing and development purposes. - -You can run a local testnet in two ways: - -1. Using the Aptos-core source code. This approach is useful for testing modifications to the Aptos-core codebase or to the Aptos Framework. - -2. Using Docker. This is particularly useful for building services on top of the Aptos blockchain or the Aptos Framework, as there is no build overhead and the ledger persists across network restarts (by default). - -The rest of this document describes: - -- How to start your local testnet with a single validator node, and -- How to start a Faucet service and attach it to your local testnet. - -## Using the Aptos-core source code - -1. Follow steps in [Building Aptos From Source](../../guides/building-from-source.md) - -1. With your development environment ready, now you can start your testnet network. 
Before you proceed, make a note of the following: - - :::tip - - When you run the below command to start the local testnet, your terminal will enter into an interactive mode, with a message `Aptos is running, press ctrl-c to exit`. Hence, you will need to open another shell terminal for the subsequent steps described in this section. - - After the below command runs, copy the `Test dir` information from the terminal output for the next step. - ::: - - To start your testnet locally, run the following command: - - ``` - CARGO_NET_GIT_FETCH_WITH_CLI=true cargo run -p aptos-node -- --test - ``` - - See below for an example of the partial output. Make a note of the `Test dir` from the output. - - ``` - ... - ... - ... - - Completed generating configuration: - Log file: "/private/var/folders/gn/m74t8ylx55z935q8wx035qn80000gn/T/b3adc18c144bfcc78a1541953893bc1c/validator.log" - Test dir: "/private/var/folders/gn/m74t8ylx55z935q8wx035qn80000gn/T/b3adc18c144bfcc78a1541953893bc1c/0/node.yaml" - Aptos root key path: "/private/var/folders/gn/m74t8ylx55z935q8wx035qn80000gn/T/b3adc18c144bfcc78a1541953893bc1c/mint.key" - Waypoint: 0:47e676b5fe38ebe2aec6053db7b3daa0b805693d6422e3475e46e89499464ecf - ChainId: TESTING - REST API endpoint: 0.0.0.0:8080 - Fullnode network: /ip4/0.0.0.0/tcp/7180 - Aptos is running, press ctrl-c to exit - ``` - -**NOTE**: The above command starts a local testnet with a single validator node. The command runs `aptos-node` from a genesis-only ledger state. If you want to reuse the ledger state produced by a previous run of `aptos-node`, then use: - -``` -cargo run -p aptos-node -- --test --config -``` - -### Attaching a Faucet to your testnet - -Faucets are stateless services that can be run in parallel with the testnet. A Faucet is a way to create Aptos test coins with no real-world value. You can use the Faucet by sending a request to create coins and transfer them into a given account on your behalf. - -1. 
Make sure that you started your local testnet as described in Step 5 above. -2. Open a new shell terminal. -3. Copy the _Aptos root key path_ from your terminal where you started the testnet, and use it to replace the `mint-key-file-path` in the below command. -4. Run the following command to start a Faucet: -``` -cargo run --package aptos-faucet-service -- run-simple --key-file-path "/tmp/694173aa3bbe019499bbd5cf3fe0e2fc/mint.key" --node-url http://127.0.0.1:8080 --chain-id TESTING -``` - -This will start a Faucet running locally without any restrictions to tokens that can be claimed and minted. This Faucet service will be as accessible as the testnet you started above. - -## Interacting with the local testnet - -After starting your local testnet, you will see the following: - -``` -Entering test mode, this should never be used in production! -Completed generating configuration: - Log file: "/tmp/694173aa3bbe019499bbd5cf3fe0e2fc/validator.log" - Test dir: "/tmp/694173aa3bbe019499bbd5cf3fe0e2fc/0/node.yaml" - Aptos root key path: "/tmp/694173aa3bbe019499bbd5cf3fe0e2fc/mint.key" - Waypoint: 0:197bc8b76761622c2d2054d8bf93c1802fa0eb4bc55f0f3d4442878fdecc297f - ChainId: TESTING - REST API endpoint: 0.0.0.0:8080 - Fullnode network: /ip4/0.0.0.0/tcp/7180 - -Aptos is running, press ctrl-c to exit -``` - -Use the [Aptos CLI tool](../../tools/aptos-cli/install-cli/index.md) to interact with your local testnet. The above output contains information you will use for starting the [Aptos CLI tool](../../tools/aptos-cli/use-cli/use-aptos-cli.md): - -* `Aptos root key path`: The root key (also known as the mint or faucet key) controls the account that can mint tokens. Available in the docker compose folder under `aptos_root_key`. -* `Waypoint`: A verifiable checkpoint of the blockchain (available in the docker compose folder under waypoint.txt) -* `REST endpoint`: The endpoint for the REST service, e.g., `http://127.0.0.1:8080`. 
-* `ChainId`: The chain ID uniquely distinguishes this network from other blockchain networks. - -## Next steps - -At this point, you will have a special root account at `0x1` that can perform the mint operation. Follow up with: - -* [Your first transaction](../../tutorials/first-transaction.md) to learn how to submit transactions. -* [Your first Move module](../../tutorials/first-move-module.md) to learn how to create Move modules. -* [Interacting with the Aptos Blockchain](../../tutorials/first-coin.md) to learn how to mint coins. diff --git a/developer-docs-site/docs/nodes/measure/index.md b/developer-docs-site/docs/nodes/measure/index.md deleted file mode 100644 index d8d7a88ff8447..0000000000000 --- a/developer-docs-site/docs/nodes/measure/index.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -title: "Measure your Nodes" -slug: "measure-index" ---- - -# Measure your Nodes - -You have numerous options for measuring and inspecting the health and performance of your nodes: - -- ### [Node Liveness Criteria](../validator-node/operator/node-liveness-criteria.md) -- ### [Use the Node Inspection Service](./node-inspection-service.md) -- ### [Use the Node Health Checker](./node-health-checker.md) -- ### [Read the Node Health Checker FAQ](./node-health-checker-faq.md) diff --git a/developer-docs-site/docs/nodes/measure/node-health-checker-faq.md b/developer-docs-site/docs/nodes/measure/node-health-checker-faq.md deleted file mode 100755 index b2215f7cec61e..0000000000000 --- a/developer-docs-site/docs/nodes/measure/node-health-checker-faq.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -title: "Node Health Checker FAQ" -slug: "node-health-checker-faq" ---- - -# Node Health Checker FAQ - -The Aptos Node Health Checker (NHC) service can be used to check the health of your node(s). See [Node Health Checker](./node-health-checker.md) for full documentation on the NHC. 
- -The purpose of this FAQ is to help you understand why your node did not pass a particular health check when you ran NHC for it. If you didn't find the information you wanted in this FAQ, [open an issue](https://github.com/aptos-labs/aptos-core/issues/new/choose), or [open a PR](https://github.com/aptos-labs/aptos-core/pulls) and add the information yourself. - -## How does the latency evaluator work? - -You are likely here because you were given an NHC evaluation result like this: - -``` -Average latency too high: The average latency was 1216ms, which is higher than the maximum allowed latency of 1000ms. -``` - -While the NHC reports 1216ms above, when you `ping` you might see a latency like 600ms. This difference is because when you `ping` an IP, the result you see is a single round trip (where the latency is the round trip time (RTT)). On the other hand, the NHC latency test will send a request to the API running on your node. In effect, this means that the NHC will time 2 round trips, because it does the following: - -1. SYN -2. SYNACK -3. ACK + Send HTTP request -4. Receive HTTP response - -i.e., the NHC must do a TCP handshake (one round trip) and then make an HTTP request (second round trip). - -The reason the NHC uses the latency evaluator is to ensure that we can maintain good network performance. In particular, if the latency to your node is too high, it will result in a low TPS and high time to finality, both of which are very important to running a highly performant L1 blockchain. **If you receive this error, you will need to try and improve the latency to your node. We have set high thresholds on this value with the understanding that nodes will be running all over the world**. 
diff --git a/developer-docs-site/docs/nodes/measure/node-health-checker.md b/developer-docs-site/docs/nodes/measure/node-health-checker.md deleted file mode 100755 index 33d3e122839f5..0000000000000 --- a/developer-docs-site/docs/nodes/measure/node-health-checker.md +++ /dev/null @@ -1,172 +0,0 @@ ---- -title: "Node Health Checker" -slug: "node-health-checker" ---- - -# Node Health Checker - -The Aptos Node Health Checker (NHC) service can be used to check the health of the following Aptos node types: - -- Validator nodes. -- Validator fullnodes, and -- Public fullnodes. - -If you are a node operator, use the NHC service to check if your node is running correctly. The NHC service evaluates your node's health by comparing against a baseline node configuration, and outputs the evaluation results. - -The Aptos Node Health Checker now also supports fullnodes via a central web service with validator support on the way: -https://nodetools.aptosfoundation.org/#/node_checker - -This document describes how to run NHC locally when you are operating a node. - -## Quickstart - -Before you get into the details of how NHC works, you can run the below steps to start the NHC service and send it a request. This quickstart uses a baseline configuration for a devnet fullnode, i.e., it will evaluate your node against a devnet fullnode that is configured with the baseline configuration YAML. - -**Important**: If your local node is not a devnet fullnode, you must use a different baseline config. See [the configuration examples in aptos-core](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/node-checker/configuration_examples) for other such example configs. - -### Step 1: Download the baseline configuration YAML - -Download a baseline configuration YAML file for a devnet fullnode. 
The below command will download the `devnet_fullnode.yaml` configuration file: - -``` -mkdir /tmp/nhc -cd /tmp/nhc -wget https://raw.githubusercontent.com/aptos-labs/aptos-core/main/ecosystem/node-checker/configuration_examples/devnet_fullnode.yaml -``` - -### Step 2: Start the NHC service - -Start the NHC service by providing the above-downloaded `devnet_fullnode.yaml` baseline configuration YAML file: - -``` -docker run -v /tmp/nhc:/nhc -p 20121:20121 -t aptoslabs/node-checker:nightly /usr/local/bin/aptos-node-checker server run --baseline-config-paths /nhc/devnet_fullnode.yaml -``` - -### Step 3: Send a request to NHC service - -Finally, send a request to the NHC service you started above. The following command runs health checks of your node that is at `node_url=http://mynode.mysite.com` and compares these results with the node configured in the baseline configuration `devnet_fullnode`: - -``` -curl 'http://localhost:20121/check?node_url=http://mynode.mysite.com&api_port=80&baseline_configuration_id=devnet_fullnode' -``` - -You will see output similar to this: - -``` -{ - "check_results": [ - { - "headline": "Chain ID reported by baseline and target match", - "score": 100, - "explanation": "The node under investigation reported the same Chain ID 18 as is reported by the baseline node", - "checker_name": "node_identity", - "links": [] - }, - { - "headline": "Role Type reported by baseline and target match", - "score": 100, - "explanation": "The node under investigation reported the same Role Type full_node as is reported by the baseline node", - "checker_name": "node_identity", - "links": [] - }, - { - "headline": "Target node produced valid recent transaction", - "score": 100, - "explanation": "We were able to pull the same transaction (version: 3238616) from both your node and the baseline node. Great! 
This implies that your node is keeping up with other nodes in the network.", - "checker_name": "transaction_availability", - "links": [] - } - ], - "summary_score": 100, - "summary_explanation": "100: Awesome!" -} -``` - -## How NHC works - -The NHC runs as a service. When you want to run a health check of your node, you send the HTTP requests to this service. - -A single NHC instance can be configured to check the health of multiple node configurations, each of different type, for example: - -- A public fullnode connected to the Aptos mainnet. -- A validator node connected to the Aptos testnet. -- A node running in a single node testnet. - -### Baseline configuration - -In all the above cases, a baseline node is used to compare your node's health. For example, for a public fullnode connected to the Aptos devnet, the baseline node might be a node run by the Aptos team and this node demonstrates optimal performance and participation characteristics. - -You will download the baseline configuration YAML before running the NHC service for your node. The baseline node's configuration YAML describes where to find this baseline node (URL + port), what evaluators (e.g. metrics checks, TPS tests, API validations, etc.) the NHC service should run, what parameters the NHC should use for those evaluators, what name the configuration has, and so on. See these [example baseline configuration YAML files](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/node-checker/configuration_examples). - -When you send requests to the NHC service, you must include a baseline configuration. For example, a request to NHC to use `devnet_fullnode` as the baseline configuration will look like this: - -``` -curl 'http://nhc.aptoslabs.com/check?node_url=http://myfullnode.mysite.com&baseline_configuration_id=devnet_fullnode' -``` - -### Getting baseline configurations ready - -In order to run the NHC service, you must have a baseline configuration that the service can use. 
You have two options here: - -#### Configure a pre-existing YAML - -You can find a few [example baseline configuration YAML files](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/node-checker/configuration_examples) that work for each of the above use cases and more. - -Next, download these configuration YAML files into the `/etc/nhc` folder in your host system. For example: - -``` -mkdir /tmp/nhc -cd /tmp/nhc -configs=(devnet_fullnode testnet_fullnode mainnet_fullnode); for c in ${configs[@]}; do wget https://raw.githubusercontent.com/aptos-labs/aptos-core/main/ecosystem/node-checker/configuration_examples/$c.yaml; done -``` - -These configurations are not quite ready to be used as they are. You will need to modify certain fields, such as the baseline node address or evaluator set (`evaluators` and `evaluator_args` in the YAML) used. The best way to iterate on this is to run the NHC with a downloaded baseline configuration and see what it says on startup. - -### Required files - -For some NHC configurations, you will need accompanying files, e.g. `mint.key` to use for running a TPS test against a validator. You should make sure these files are also available to NHC, either on disk or mounted into your container. NHC expects them on startup at a path specified in the baseline configuration YAML. - -## Running NHC: Docker - -:::tip -While the Aptos team hosts our own instances of this service, we encourage node operators to run their own instances. -::: - -When you are ready with baseline configuration YAML and the required files, you can run the NHC server with a command like this, for example, with Docker: - -``` -docker run -v /etc/nhc:/etc/nhc -p 20121:20121 -t aptoslabs/node-checker:nightly /usr/local/bin/aptos-node-checker server run --baseline-config-paths /tmp/nhc/devnet_fullnode.yaml /tmp/nhc/testnet_fullnode.yaml /tmp/nhc/mainnet/fullnode.yaml -``` - -:::tip -You may want to include other environment variables such as `RUST_LOG=info`. 
As you can see, by default NHC runs on port 20121. Make sure to publish it from the container, as shown in the above command, and ensure the port is open on your host. You may change the port NHC runs on with `--listen-port`. -::: - -## Running NHC: Source - -First, check out the source: - -``` -git clone git@github.com:aptos-labs/aptos-core.git -cd aptos-core -``` - -Depending on your setup, you may want to check out a particular branch, to ensure NHC is compatible with your node, e.g. `git checkout --track devnet`. - -Run NHC: - -``` -cargo run -p aptos-node-checker --release -- server run --baseline-config-paths /tmp/nhc/devnet_fullnode.yaml -``` - -## Generating the OpenAPI specs - -To generate the OpenAPI specs, run the following commands from `ecosystem/node-checker`: - -``` -cargo run -- server generate-openapi -f yaml > doc/spec.yaml -cargo run -- server generate-openapi -f json > doc/spec.json -``` - -You can also hit the `/spec.yaml` and `/spec.json` endpoints of the running service. - diff --git a/developer-docs-site/docs/nodes/measure/node-inspection-service.md b/developer-docs-site/docs/nodes/measure/node-inspection-service.md deleted file mode 100755 index cb3577e6b28ad..0000000000000 --- a/developer-docs-site/docs/nodes/measure/node-inspection-service.md +++ /dev/null @@ -1,194 +0,0 @@ ---- -title: "Node Inspection Service" -slug: "node-inspection-service" ---- - -# Node Inspection Service - -Aptos nodes collect metrics and system information while running. These metrics provide a way to track, -monitor and inspect the health and performance of the node dynamically, at runtime. Node metrics and system -information can be queried or exported via an inspection service that runs on each node. - -You can configure various aspects of the node inspection service. This document describes how to expose and see the metrics locally, on the respective node. You may also view these metrics remotely by making the port publicly accessible via firewall rules. 
Generally, validator nodes don't expose these metrics for security reasons, while fullnodes do expose them so that the [health checker](./node-health-checker.md) can verify them.
-:::
The count will increase only when the validator is chosen to be a proposer, -which depends on the node's stake and leader election reputation. You should expect this metric to -increase at least once per hour. -2. `aptos_consensus_last_committed_round`: Counts the last committed round of the node. -During consensus, we expect this value to increase once per consensus round, which should be multiple -times per second. If this does not happen, it is likely the node is not participating in consensus. -3. `aptos_consensus_timeout_count`: Counts the number of times the node locally timed out while trying -to participate in consensus. If this counter increases, it is likely the node is not participating -in consensus and may be having issues, e.g., network difficulties. -4. `aptos_state_sync_executing_component_counters{label="consensus"`: This counter increases -a few times per second as long as the node is participating in consensus. When this counter stops -increasing, it means the node is not participating in consensus, and has likely fallen back to state -synchronization (e.g., because it fell behind the rest of the validators and needs to catch up). - -### State sync - -If you are running a fullnode (or a validator that still needs to synchronize to the latest blockchain -state), the following [state sync](../../guides/state-sync.md) metrics are important: -1. `aptos_state_sync_version{type="synced"}`: This metric displays the current synced version of the node, -i.e., the number of transactions the node has processed. If this metric stops increasing, it means the -node is not syncing. Likewise, if this metric doesn't increase faster than the rate at which new transactions -are committed to the blockchain, it means the node is unlikely to get and stay up-to-date with the rest of -the network. 
Note: if you've selected to use [fast sync](../../guides/state-sync.md#fast-syncing), -this metric won't increase until all states have been downloaded, which may take some time. See (3) below. -2. `aptos_data_client_highest_advertised_data{data_type="transactions"}`: This metric displays the highest -version synced and advertised by the peers that your node is connected to. As a result, when this metric is -higher than `aptos_state_sync_version{type="synced"}` (above) it means your node can see new blockchain data and -will sync the data from its peers. -3. `aptos_state_sync_version{type="synced_states"}`: This metric counts the number of states that have been -downloaded while a node is [fast syncing](../../guides/state-sync.md#fast-syncing). If this metric doesn't increase, -and `aptos_state_sync_version{type="synced"}` doesn't increase (from above), it means the node is not syncing -at all and an issue has likely occurred. -4. `aptos_state_sync_bootstrapper_errors` and `aptos_state_sync_continuous_syncer_errors`: If your node is -facing issues syncing (or is seeing transient failures), these metrics will increase each time an error occurs. -The `error_label` inside these metrics will display the error type. - -:::tip -Compare the synced version shown by `aptos_state_sync_version{type="synced"}` with the highest version shown on the [Aptos Explorer](https://explorer.aptoslabs.com/?network=mainnet) to see how far behind the latest blockchain version your node is. Remember to select the correct network that your node is syncing to (e.g., `mainnet`). -::: - -### Networking - -The following network metrics are important, for both validators and fullnodes: -1. `aptos_connections{direction="inbound"` and `aptos_connections{direction="outbound"`: These metrics count -the number of peers your node is connected to, as well as the direction of the network connection. An `inbound` -connection means that a peer (e.g., another fullnode) has connected to you. 
An `outbound` connection means that -your node has connected to another node (e.g., connected to a validator fullnode). - 1. If your node is a validator, the sum of both `inbound` and `outbound` connections should be equal to the - number of other validators in the network. Note that only the sum of these connections matter. If all connections - are `inbound`, or all are `outbound`, this doesn't matter. - 2. If your node is a fullnode, the number of `outbound` connections should be `> 0`. This will ensure your node is - able to synchronize. Note that the number of `inbound` connections matters only if you want to act as a seed in - the network and allow other nodes to connect to you as discussed - [Fullnode Network Connections](../../nodes/full-node/fullnode-network-connections.md#allowing-fullnodes-to-connect-to-your-node). - - -### Mempool - -The following [mempool](../../concepts/blockchain.md#mempool) metrics are important: -1. `core_mempool_index_size{index="system_ttl"`: This metric displays the number of transactions currently sitting in -the mempool of the node and waiting to be committed to the blockchain: - 1. If your node is a fullnode, it's highly unlikely that this metric will be `> 0`, unless transactions are actively - being sent to your node via the REST API and/or other fullnodes that have connected to you. Most fullnode operators - should ignore this metric. - 2. If your node is a validator, you can use this metric to see if transactions from your node's mempool are being - included in the blockchain (e.g., if the count decreases). Likewise, if this metric only increases, it means - that either: (i) your node is unable to forward transactions to other validators to be included in the blockchain; or - (ii) that the entire blockchain is under heavy load and may soon become congested. - -### REST API - -The following [REST API](https://aptos.dev/nodes/aptos-api-spec) metrics are important: -1. 
`aptos_api_requests_count{method="GET"` and `aptos_api_requests_count{method="POST"`: These metrics count -the number of REST API `GET` and `POST` requests that have been received via the node's REST API. This -allows you to monitor and track the amount of REST API traffic on your node. You can also further use the -`operation_id` in the metric to monitor the types of operations the requests are performing. - -2. `aptos_api_response_status_count`: This metric counts the number of response types that were sent for -the REST API. For example, `aptos_api_response_status_count{status="200"}` counts the number of requests -that were successfully handled with a `200` response code. You can use this metric to track the success and -failure rate of the REST API traffic. diff --git a/developer-docs-site/docs/nodes/networks.md b/developer-docs-site/docs/nodes/networks.md deleted file mode 100755 index a2824fcfee566..0000000000000 --- a/developer-docs-site/docs/nodes/networks.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -title: "Networks" -slug: "networks" -hide_table_of_contents: true ---- - -# Aptos Networks - -|Description |Mainnet | Testnet | Devnet | -|--------------------------------------------|---|---|---| -|**REST API** | https://fullnode.mainnet.aptoslabs.com/v1 | https://fullnode.testnet.aptoslabs.com/v1 | https://fullnode.devnet.aptoslabs.com/v1 | -|**REST API Spec** | Link | Link | Link | -|**Indexer GraphQL API** | https://indexer.mainnet.aptoslabs.com/v1/graphql | https://indexer-testnet.staging.gcp.aptosdev.com/v1/graphql | https://indexer-devnet.staging.gcp.aptosdev.com/v1/graphql | -|**Indexer GraphQL API Spec** | Link | Link | Link | -|**[Dev Portal] Fullnode API** | https://api.mainnet.aptoslabs.com/v1 | https://api.testnet.aptoslabs.com/v1 | https://api.devnet.aptoslabs.com/v1 | -|**[Dev Portal] Indexer GraphQL API** | https://api.mainnet.aptoslabs.com/v1/graphql | https://api.testnet.aptoslabs.com/v1/graphql | https://api.devnet.aptoslabs.com/v1/graphql | 
-|**[Dev Portal] GRPC Transaction Stream** | https://grpc.mainnet.aptoslabs.com | https://grpc.testnet.aptoslabs.com | https://grpc.devnet.aptoslabs.com | -|**Faucet** | No Faucet | https://faucet.testnet.aptoslabs.com/ | https://faucet.devnet.aptoslabs.com/ | -|**Genesis and Waypoint** | https://github.com/aptos-labs/aptos-networks/tree/main/mainnet | https://github.com/aptos-labs/aptos-networks/tree/main/testnet| https://github.com/aptos-labs/aptos-networks/tree/main/devnet | -|**Chain ID** | 1 | 2 | [On Aptos Explorer **select Devnet from top right**](https://explorer.aptoslabs.com/?network=Devnet).| -|**Epoch duration** | 7200 seconds |7200 seconds | 7200 seconds | -|**Network providers** | Fully decentralized. | Managed by Aptos Labs on behalf of Aptos Foundation. | Managed by Aptos Labs on behalf of Aptos Foundation. | -|**Release cadence** | Monthly | Monthly | Weekly | -|**Wipe cadence** | Never. | Never. | On update. | -|**Purpose** | The main Aptos network. | Long-lived test network. | Bleeding edge and exploratory. | -|**Network status** | Always live. | Always live. | Almost always live, with brief interruptions during updates. | diff --git a/developer-docs-site/docs/nodes/node-files-all-networks/node-files-devnet.md b/developer-docs-site/docs/nodes/node-files-all-networks/node-files-devnet.md deleted file mode 100644 index ce7c31ad84d5c..0000000000000 --- a/developer-docs-site/docs/nodes/node-files-all-networks/node-files-devnet.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -title: "Node Files For Devnet" -slug: "node-files-devnet" ---- - -# Node Files For Devnet - -When you are deploying an Aptos node in the **devnet**, you will need to download the files listed on this page. - -- **Mainnet:** If you are deploying in the mainnet, download the files from the [Node Files For Mainnet](./node-files.md) page. -- **Testnet:** If you are deploying in the testnet, download the files from the [Node Files For Testnet](./node-files-testnet.md) page. 
- ---- - -These files can be downloaded from separate `aptos-labs` repos on GitHub. The `wget` commands provided below will work on macOS and Linux. Open a terminal and paste the `wget` command to download the file. - -:::tip Files for the validator node -Unless specified, all these files are required for validator node. A file with `fullnode` in its filename is required for either a validator fullnode or a public fullnode. -::: - -## docker-compose.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/docker-compose.yaml - ``` - -## validator.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O validator.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/validator.yaml - ``` - -## genesis.blob - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O genesis.blob https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/devnet/genesis.blob - ``` - -## waypoint.txt - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O waypoint.txt https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/devnet/waypoint.txt - ``` - -## docker-compose-src.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-src.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/docker-compose-src.yaml - ``` - -## haproxy.cfg - -- **Git repo:** `aptos-core` -- **Git branch:** 
`devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/haproxy.cfg - ``` - -## blocked.ips - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O blocked.ips https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/blocked.ips - ``` - -## docker-compose-fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. -::: - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/docker-compose-fullnode.yaml - ``` - -## fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. 
-::: - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/fullnode.yaml - ``` - -## haproxy-fullnode.cfg (fullnode only) - -- **Git repo:** `aptos-core` -- **Git branch:** `devnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy-fullnode.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/devnet/docker/compose/aptos-node/haproxy-fullnode.cfg - ``` diff --git a/developer-docs-site/docs/nodes/node-files-all-networks/node-files-testnet.md b/developer-docs-site/docs/nodes/node-files-all-networks/node-files-testnet.md deleted file mode 100644 index 1c079cd1abbb2..0000000000000 --- a/developer-docs-site/docs/nodes/node-files-all-networks/node-files-testnet.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -title: "Node Files For Testnet" -slug: "node-files-testnet" ---- - -# Node Files For Testnet - -When you are deploying an Aptos node in the **testnet**, you will need to download the files listed on this page. - -- **Mainnet:** If you are deploying in the mainnet, download the files from the [Node Files For Mainnet](./node-files.md) page. -- **Devnet:** If you are deploying in the testnet, download the files from the [Node Files For Devnet](./node-files-devnet.md) page. - ---- - -These files can be downloaded from separate `aptos-labs` repos on GitHub. The `wget` commands provided below will work on macOS and Linux. Open a terminal and paste the `wget` command to download the file. - -:::tip Files for the validator node -Unless specified, all these files are required for validator node. A file with `fullnode` in its filename is required for either a validator fullnode or a public fullnode. 
-::: - -## docker-compose.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/docker-compose.yaml - ``` - -## validator.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O validator.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/validator.yaml - ``` - -## genesis.blob - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O genesis.blob https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/genesis.blob - ``` - -## waypoint.txt - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O waypoint.txt https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/testnet/waypoint.txt - ``` - -## docker-compose-src.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-src.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/docker-compose-src.yaml - ``` - -## haproxy.cfg - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/haproxy.cfg - ``` - -## blocked.ips - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O blocked.ips 
https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/blocked.ips - ``` - -## docker-compose-fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. -::: - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/docker-compose-fullnode.yaml - ``` - -## fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. -::: - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/fullnode.yaml - ``` - -## haproxy-fullnode.cfg (fullnode only) - -- **Git repo:** `aptos-core` -- **Git branch:** `testnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy-fullnode.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/testnet/docker/compose/aptos-node/haproxy-fullnode.cfg - ``` diff --git a/developer-docs-site/docs/nodes/node-files-all-networks/node-files.md b/developer-docs-site/docs/nodes/node-files-all-networks/node-files.md deleted file mode 100644 index 12d192643ed9f..0000000000000 --- a/developer-docs-site/docs/nodes/node-files-all-networks/node-files.md +++ /dev/null @@ -1,117 +0,0 @@ ---- -title: "Node Files For Mainnet" -slug: "node-files" ---- - -# Node Files For Mainnet - -When you are deploying an Aptos node in the **mainnet**, you will need to download the files listed on this page. - -- **Devnet:** If you are deploying in the devnet, download the files from the [Node Files For Devnet](./node-files-devnet.md) page. 
-- **Testnet:** If you are deploying in the testnet, download the files from the [Node Files For Testnet](./node-files-testnet.md) page. - ---- - -These files can be downloaded from separate `aptos-labs` repos on GitHub. The `wget` commands provided below will work on macOS and Linux. Open a terminal and paste the `wget` command to download the file. - -:::tip Files for the validator node -Unless specified, all these files are required for validator node. A file with `fullnode` in its filename is required for either a validator fullnode or a public fullnode. -::: - -## docker-compose.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/docker-compose.yaml - ``` - -## validator.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O validator.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/validator.yaml - ``` - -## genesis.blob - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O genesis.blob https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/genesis.blob - ``` - -## waypoint.txt - -- **Git repo:** `aptos-networks` -- **Git branch:** `main` on https://github.com/aptos-labs/aptos-networks -- **Command to download:** - ```bash - wget -O waypoint.txt https://raw.githubusercontent.com/aptos-labs/aptos-networks/main/mainnet/waypoint.txt - ``` - -## docker-compose-src.yaml - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-src.yaml 
https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/docker-compose-src.yaml - ``` - -## haproxy.cfg - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/haproxy.cfg - ``` - -## blocked.ips - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O blocked.ips https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/blocked.ips - ``` - -## docker-compose-fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. -::: - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O docker-compose-fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/docker-compose-fullnode.yaml - ``` - -## fullnode.yaml (fullnode only) - -:::tip Fullnode -Fullnode means either a validator fullnode or a public fullnode. 
-::: - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O fullnode.yaml https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/fullnode.yaml - ``` - -## haproxy-fullnode.cfg (fullnode only) - -- **Git repo:** `aptos-core` -- **Git branch:** `mainnet` on https://github.com/aptos-labs/aptos-core -- **Command to download:** - ```bash - wget -O haproxy-fullnode.cfg https://raw.githubusercontent.com/aptos-labs/aptos-core/mainnet/docker/compose/aptos-node/haproxy-fullnode.cfg - ``` diff --git a/developer-docs-site/docs/nodes/nodes-landing.md b/developer-docs-site/docs/nodes/nodes-landing.md deleted file mode 100644 index 0deb88348ac66..0000000000000 --- a/developer-docs-site/docs/nodes/nodes-landing.md +++ /dev/null @@ -1,253 +0,0 @@ ---- -title: "Learn about Nodes" -slug: "nodes-landing" -hide_table_of_contents: true ---- - -# Learn about Nodes - -The Aptos network is comprised of nodes of three types: validator node, validator fullnode and public fullnode. To participate in consensus, you are required to run both a validator node and a validator fullnode, and stake. - -Also learn how to run a public fullnode on a local network and connect to either a testnet or a devnet. This section describes everything you need to stake and participate in consensus and governance. See also the [external resources](../community/external-resources.md) offered by your fellow node operators. - - - -## Validator operations - -
-
- - - -
-
- -## General - - diff --git a/developer-docs-site/docs/nodes/validator-node/_category_.json b/developer-docs-site/docs/nodes/validator-node/_category_.json deleted file mode 100644 index 60f7fc4d92223..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Running Nodes For Incentivized Testnet", - "position": 8 -} diff --git a/developer-docs-site/docs/nodes/validator-node/index.md b/developer-docs-site/docs/nodes/validator-node/index.md deleted file mode 100755 index 1beb8a523c3f0..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/index.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -title: "Run a Validator" -slug: "validators" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - - -# Run a Validator - -To participate in the consensus process in the Aptos mainnet, you must deploy and run a validator node and a validator fullnode. Optionally you can also run a public fullnode. This document presents a high-level conceptual overview of the important steps involved in deploying the nodes for validation. - -Before initializing a staking pool or delegation pool, read about [Staking](../../concepts/staking.md) and [Delegated Staking](../../concepts/delegated-staking.md) to learn the difference between the stake pool types. Note that once a stake pool has been created, it cannot be changed to a delegation pool or vice versa. - -See also the [external resources](../../community/external-resources.md) for tools and services offered by your fellow node operators. - -
-
-
-
-
-

1

-

Read the node requirements.

-

Select a deployment method. Use on-premises or cloud services.

-
-
-
-
-
-
-

2

-

Generate identity for nodes.

-

Account address and private and public keys come to exist.

-
-
-
-
-
-
-

3

-

Configure validator and validator fullnode.

-

Establishes network identity for the nodes. Ready to handshake with other nodes.

-
-
-
-
-
-
-

4

-

Insert genesis and waypoint to start the nodes.

-

Bootstrapped the nodes. Aptos network becomes aware of the nodes.

-
-
-
-
-
-
-

5

-

Join the validator set.

-

Initialize staking pool, bootstrap in production mode, start syncing. Begin validating and earn rewards.

-
-
-
-
-
- -Follow the documents within this section to run a validator node and a validator fullnode in Aptos. Here is a summary of the process: - -1. Start by reading the node requirements to get to know the compute, memory and storage resources you need. Note also the internet bandwidth requirements. -1. Select a method to deploy your nodes, i.e., use a cloud managed Kubernetes, Docker, or source code. -1. Generate identity for the nodes. This is the first step in progressively making your nodes secure and ready to be integrated into the Aptos network. -1. Using YAML files, configure your nodes with user and network identity. This step enables the nodes to be recognized by other nodes in the Aptos network. Handshaking is possible after this step. -1. With the node identity established for the Aptos network, next you install the necessary binaries and locally generate the genesis blob and waypoint files. These will allow the node to be connected to the Aptos network. -1. Bootstrap the nodes. The nodes now have the Aptos node binary running on them with the identity set. This fulfills the requirement for the Aptos network to become aware of your nodes. However, your nodes cannot connect to the Aptos network yet because these nodes are not yet in the validator set. On the Aptos network a validator can only accept another validator for connections. Until your nodes are in the validator set, they will be rejected by other validator nodes on the network. -1. Perform the required actions before joining the validator set. For this, you must perform a few tasks such as initializing a staking pool, delegating to operators and voters, downloading the latest versions of the genesis blob and waypoint text files and restarting your nodes. -1. Join the validator set. Other nodes will see your nodes and will establish connection to your nodes. Now you can stay in sync with the Aptos blockchain by building up your database of the history of the ledger. 
It takes some time for your nodes to build the database. Whenever your nodes reach the latest version of the blockchain, your validator node will be able to start participating in the consensus process. - diff --git a/developer-docs-site/docs/nodes/validator-node/operator/connect-to-aptos-network.md b/developer-docs-site/docs/nodes/validator-node/operator/connect-to-aptos-network.md deleted file mode 100644 index 60600a5b605d2..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/connect-to-aptos-network.md +++ /dev/null @@ -1,282 +0,0 @@ ---- -title: "Connect to Aptos Network" -slug: "connect-to-aptos-network" ---- - -# Connect to Aptos Network - -This document describes how to connect your running validator node and validator fullnode to an Aptos network. Follow these instructions only if your validator has met the minimal [staking](../../../concepts/staking.md) requirement. - -:::tip Minimum staking requirement -The current required minimum for staking is 1M APT tokens. -::: - - -## Initializing the stake pool - -First, you need to initialize the stake pool. - -To initialize a staking pool, follow the instructions in [staking pool operations.](../../../nodes/validator-node/operator/staking-pool-operations.md#initialize-cli) - -To initialize a delegation pool, follow the instructions in [delegation pool operations.](../../../nodes/validator-node/operator/delegation-pool-operations/#initialize-a-delegation-pool) - - -## Bootstrapping validator node - -After initializing the stake pool, make sure the validator node is bootstrapped with the correct [genesis blob and waypoint](../../node-files-all-networks/node-files.md) for the corresponding network. - -1. Follow the steps for [joining the validator set.](../../../nodes/validator-node/operator/connect-to-aptos-network/#joining-validator-set) -2. Bring up the validator fullnode (VFN) only. It will connect to other nodes and fast sync. -3. Once the VFN is synced, bring up the validator. 
It will sync from the VFN and then connect to other validators and start participating in consensus. - -To bootstrap your node, first you need to know the pool address to use: - - -```bash -aptos node get-stake-pool \ - --owner-address -``` - -### Using source code - -1. Stop your node and remove the data directory. - - **Make sure you remove the `secure-data.json` file also**. View [validator.yaml](https://github.com/aptos-labs/aptos-core/blob/e358a61018bb056812b5c3dbd197b0311a071baf/docker/compose/aptos-node/validator.yaml#L13) to see the location of the `secure-data.json` file. -2. Download the `genesis.blob` and `waypoint.txt` files published by Aptos. - - See [Node Files](../../node-files-all-networks/node-files.md) for your network (mainnet, testnet, or devnet) for the locations and commands to download these files. -3. Update your `account_address` in the `validator-identity.yaml` and `validator-fullnode-identity.yaml` files to your **pool address**. Do not change anything else. Keep the keys as they are. -4. Pull the latest changes from the associated (ex. `mainnet`) branch. -5. Close the metrics port `9101` and the REST API port `80` on your validator (you can leave it open for public fullnode). -6. Restart the validator node and validator fullnode. - -### Using Docker - -1. Stop your node and remove the data volumes: `docker compose down --volumes`. - - **Make sure you remove the `secure-data.json` file too.** See this [validator.yaml](https://github.com/aptos-labs/aptos-core/blob/e358a61018bb056812b5c3dbd197b0311a071baf/docker/compose/aptos-node/validator.yaml#L13) line for the location of the `secure-data.json` file. -2. Download the `genesis.blob` and `waypoint.txt` files published by Aptos. - - See [Node Files](../../node-files-all-networks/node-files.md) for locations and commands to download these files. -3. Update your `account_address` in the `validator-identity.yaml` and `validator-fullnode-identity.yaml` files to your **pool address**. -4. 
Update your Docker image to the [latest release](../../../releases/index.md) of the network branch (e.g. mainnet, testnet). -5. Close the metrics port `9101` and the REST API port `80` on your validator (remove it from the Docker compose file). You can leave it open for the public fullnode. -6. Restart the node with: `docker compose up` - -### Using Terraform - -1. Increase the `era` number in your Terraform configuration. When this configuration is applied, it will wipe the data. -2. Update `chain_id` to 1 (for mainnet). The chain IDs for other Aptos networks are in [Aptos Blockchain Networks](../../networks.md). -3. Update your Docker image to the [latest release](../../../releases/index.md) of the network branch (e.g. mainnet, testnet). -4. Close the metrics port and the REST API port for validator. - - -5. **Add monitoring components** - - :::tip Supported only using Terraform - This is currently only supported using Terraform. - ::: - - 1. Set the `enable_monitoring` variable in your terraform module. For example: - - ```rust - module "aptos-node" { - ... - enable_monitoring = true - utility_instance_num = 3 # this will add one more utility instance to run monitoring component - } - ``` - - 2. Apply the changes with: `terraform apply` - - 3. You will see a new pod getting created. Run `kubectl get pods` to check. - - 4. Access the dashboard. - - First, find the IP/DNS for the monitoring load balancer. - - ```bash - kubectl get svc ${WORKSPACE}-mon-aptos-monitoring --output jsonpath='{.status.loadBalancer.ingress[0]}' - ``` - - You can access the dashboard on `http://`. - - -7. Pull latest of the terraform module `terraform get -update`, and then apply Terraform: `terraform apply`. -8. Download the `genesis.blob` and `waypoint.txt` files published by Aptos. - - See [Node Files](../../node-files-all-networks/node-files.md) for locations and commands to download these files. -9. 
Update your `account_address` in the `validator-identity.yaml` and `validator-fullnode-identity.yaml` files to your **pool address**. Do not change anything else. Keep the keys as they are. -10. Recreate the secrets. Make sure the secret name matches your `era` number, e.g. if you have `era = 3`, then you should replace the secret name to be: - ```bash - ${WORKSPACE}-aptos-node-0-genesis-e3 - ``` - - ```bash - export WORKSPACE= - - kubectl create secret generic ${WORKSPACE}-aptos-node-0-genesis-e2 \ - --from-file=genesis.blob=genesis.blob \ - --from-file=waypoint.txt=waypoint.txt \ - --from-file=validator-identity.yaml=keys/validator-identity.yaml \ - --from-file=validator-full-node-identity.yaml=keys/validator-full-node-identity.yaml - ``` - -## Joining Validator Set - -Next, follow the below steps to set up the validator node using the operator account and join the validator set. This is required for your validator and validator fullnode to start syncing. - -:::tip Mainnet vs Testnet -The below CLI command examples use mainnet. Change the `--network` value for testnet and devnet. View the values in [Aptos Blockchain Networks](../../networks.md) to see how profiles can be configured based on the network. -::: - -### 1. Initialize Aptos CLI - - ```bash - aptos init --profile mainnet-operator \ - --network mainnet \ - --private-key \ - --skip-faucet - ``` - -:::tip -The `account_private_key` for the operator can be found in the `private-keys.yaml` file under `~/$WORKSPACE/keys` folder. -::: - -### 2. Check your validator account balance - -Make sure you have enough APT to pay for gas. You can check for this either on the Aptos Explorer or using the CLI: - -- On the Aptos Explorer `https://explorer.aptoslabs.com/account/?network=Mainnet`, or -- Use the CLI: - - ```bash - aptos account list --profile mainnet-operator - ``` - -This will show you the coin balance you have in the validator account. 
You will see an output like below: - -```json -"coin": { - "value": "5000" - } -``` - -:::tip Already in validator set? Skip to Step 6 -If you know you are already in the validator set, then skip steps 3, 4, and 5 and go directly to step 6 to confirm it. -::: - -### 3. Update validator network addresses on-chain - -```bash -aptos node update-validator-network-addresses \ - --pool-address \ - --operator-config-file ~/$WORKSPACE/$USERNAME/operator.yaml \ - --profile mainnet-operator -``` - -:::tip Important notes -The network address updates and the consensus key rotation will be applied only at the end of the current epoch. Note that the validator need not leave the validator set to make these updates. You can run the commands for address and key changes. For the remaining duration of the current epoch your validator will still use the old key and addresses but when the epoch ends it will switch to the new key and addresses. -::: - -### 4. Rotate the validator consensus key on-chain - -```bash -aptos node update-consensus-key \ - --pool-address \ - --operator-config-file ~/$WORKSPACE/$USERNAME/operator.yaml \ - --profile mainnet-operator -``` - -### 5. Join the validator set - -```bash -aptos node join-validator-set \ - --pool-address \ - --profile mainnet-operator -``` - -The validator set is updated at every epoch change. You will see your validator node joining the validator set only in the next epoch. Both validator and validator fullnode will start syncing once your validator is in the validator set. - -:::tip When is next epoch? -You can see it on the [Aptos Explorer](https://explorer.aptoslabs.com/validators/all?network=mainnet) or by running the command `aptos node get-stake-pool` as shown in [Checking your stake pool information](#checking-your-stake-pool-information). -::: - -### 6. Check the validator set - -When you join the validator set, your validator node will be in "Pending Active" state until the next epoch occurs. 
**During this time you might see errors like "No connected AptosNet peers". This is normal.** Run the below command to look for your validator in the "pending_active" list. - -```bash -aptos node show-validator-set --profile mainnet-operator | jq -r '.Result.pending_active' | grep -``` - -When the next epoch happens, the node will be moved into "active_validators" list. Run the below command to see your validator in the "active_validators" list: - -```bash -aptos node show-validator-set --profile mainnet-operator | jq -r '.Result.active_validators' | grep -``` - - You should expect the active value for your `StakePool` to keep increasing. It is updated at every epoch. - - -## Verify Node Correctness - -Now that you have joined the validator set, you should verify your node correctness. - -:::tip First time syncing? -Note that in some environments, e.g., `testnet`, your validator fullnode will begin syncing first (before your validator is able to sync). -This is normal behaviour. Once your validator fullnode has finished syncing, your validator node will start syncing and eventually start participating in consensus. -::: - -:::tip Node Liveness Definition -See [node liveness criteria](../operator/node-liveness-criteria.md) for details. -::: - -After your validator node has joined the validator set, you can validate its correctness by following these steps: - -1. Check if your node is state syncing. **Replace `127.0.0.1` with your validator IP/DNS if deployed on the cloud**. - - ```bash - curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_state_sync_version" - ``` - - You should expect to see the `synced` or `synced_states` versions increasing. The versions should start increasing - for your validator fullnode first, then eventually your validator node will start syncing. - -2. Verify that your validator is connecting to other peers on the network. 
- - ```bash - curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_connections{.*\"Validator\".*}" - ``` - - The command will output the number of inbound and outbound connections of your validator node. For example: - - ```bash - aptos_connections{direction="inbound",network_id="Validator",peer_id="f326fd30",role_type="validator"} 5 - aptos_connections{direction="outbound",network_id="Validator",peer_id="f326fd30",role_type="validator"} 2 - ``` - - As long as one of the metrics is greater than zero, your validator node is connected to at least one of the peers on the network. If your validator is not - connected to any peers, make sure your validator fullnode has completed syncing first. Once your validator fullnode has finished syncing, your validator - node will start syncing and eventually be able to connect to other peers. - -3. You can also check if your node is connected to an Aptos node: replace `` with the peer ID shared by Aptos team. - - ```bash - curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_network_peer_connected{.*remote_peer_id=\"\".*}" - ``` - -4. After your node state syncs to the latest version, you can also check if consensus is making progress, and your node is proposing. - - ```bash - curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_consensus_current_round" - - curl 127.0.0.1:9101/metrics 2> /dev/null | grep "aptos_consensus_proposals_count" - ``` - - You should expect to see this number keep increasing. - -5. Finally, the most straight forward way to see if your node is functioning properly is to check if it is making staking reward. 
You can check it on the Aptos Explorer: `https://explorer.aptoslabs.com/account/?network=Mainnet`: - - ```json - 0x1::stake::StakePool - - "active": { - "value": "100009129447462" - } - ``` diff --git a/developer-docs-site/docs/nodes/validator-node/operator/delegation-pool-operations.md b/developer-docs-site/docs/nodes/validator-node/operator/delegation-pool-operations.md deleted file mode 100644 index 84e5baad24bae..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/delegation-pool-operations.md +++ /dev/null @@ -1,166 +0,0 @@ ---- -title: "Delegation Pool Operations" -slug: "delegation-pool-operations" ---- - -# Delegation Pool Operations - -> Beta: This documentation is in experimental, beta mode. Supply feedback by [requesting document changes](../../../community/site-updates.md#request-docs-changes). See also the related [Staking Pool Operations](./staking-pool-operations.md) instructions. - -Validator operators should follow these instructions to carry out delegation pool operations for [delegated staking](../../../concepts/delegated-staking.md). You may delegate as little as 10 APT plus a small add stake fee that will be mostly refunded as rewards at the end of the current 2-hour epoch. You might notice that some UIs might use 11 APT as the minimum for a round number. Note that your validator will become part of the *Active Validator Set* only when the delegation pool satisfies the minimum cumulative [staking requirement of 1 million APT](./staking-pool-operations.md). - -The delegation pool owner should set an operator for the pool via the `set_operator` function described in the [Perform pool owner operations](#perform-pool-owner-operations) section. The operator should then start their own Aptos node, as it is a best practice to have a different account for owner and operator. Once the delegation pool attains 1 million APT, the operator can join the validator set. 
- -The operator address will receive the pool commission that was set at the initialization of the delegation pool, which is automatically distributed as stake in the delegation pool at the end of each epoch. The operator will act as a normal Delegation Pool account that is able to do all of the operations described in [Perform delegation pool operations](#perform-delegation-pool-operations). - - -## Prerequisites - -1. [Install](../../../tools/aptos-cli/install-cli/index.md) and [configure](../../../tools/aptos-cli/use-cli/use-aptos-cli.md#configuration-examples) the Aptos CLI. If you are looking to develop on the Aptos blockchain, debug apps, or perform node operations, the Aptos tool offers a command line interface for these purposes. -2. [Initialize local configuration and create an account](../../../tools/aptos-cli/use-cli/use-aptos-cli.md#initialize-local-configuration-and-create-an-account) on the Aptos blockchain. - - -## Initialize a delegation pool - -Before initializing a delegation pool, you need to know the delegation pool address. You can use the following CLI commands to obtain the delegation pool address depending on where you are in the process: -- Before you create the delegation pool: - ```bash - aptos account derive-resource-account-address --address --seed "aptos_framework::delegation_pool" --seed-encoding utf8 - ``` - - The `` is a number chosen by you to create the resource account address to host the delegation pool resource. Once you choose a seed, you should use the same value for all following usages. -- After you create the delegation pool: - ```bash - aptos account derive-resource-account-address - ``` - -1. 
Run the command below, substitute in the profile you previously configured during initialization: - ```bash - aptos move run --profile \ - --function-id 0x1::delegation_pool::initialize_delegation_pool \ - --args u64:1000 string:00 - ``` - Where `--args`: - - - `u64:1000` represents `operator_commission_percentage` - 1000 is equivalent to 10% and 10000 is 100%. - - `string:00` represents `delegation_pool_creation_seed` - a number chosen by you to create a resource account associated with your owner address; this account is used to host the delegation pool resource. You should use the same number here as the `--seed` you used in the previous step to create the delegation pool. - - Note that once `operator_commission_percentage` is set, it cannot be changed. - - 2. Once this command is executed without error an account for resources is established using the `owner` signer and a provided `delegation_pool_creation_seed` to hold the `delegation pool resource` and possess the underlying stake pool. - - 3. The `owner` is granted authority over assigning the `operator` and `voter` roles, which are initially held by the `owner`. - - 4. The delegation pool can now accept a minimum amount of 10 APT from any user who wishes to delegate to it. - - 5. The delegation pool can now [connect to the Aptos Network](./connect-to-aptos-network.md). - -## Perform delegation pool operations - -This section describes the available operations that can be performed on this recently created pool. Once the delegation pool has been established, use the Aptos CLI to operate the pool. 
The available actions that can be performed on it include: - -* Add `amount` of coins to the delegation pool `pool_address` using the public entry method `add_stake(delegator: &signer, pool_address: address, amount u64)` and substituting your values into the command below before running it: - - ```bash - aptos move run --profile delegator \ - --function-id 0x1::delegation_pool::add_stake \ - --args address: u64: - ``` - -* Undelegate (unlock) the amount of funds from the delegator's active and pending active stake up to the limit of the active stake in the stake pool using public entry method `unlock(delegator: &signer, pool_address: address, amount: u64)` and substituting your values into the command below before running it: - - ```bash - aptos move run --profile delegator \ - --function-id 0x1::delegation_pool::unlock \ - --args address: u64: - ``` - -* Cancel undelegate (reactivate stake) `amount` of coins from `pending_inactive` state to `active state` using public entry method `reactivate_stake(delegator: &signer, pool_address: address, amount: u64)` with the command and your values: - - ```bash - aptos move run --profile delegator \ - --function-id 0x1::delegation_pool::reactivate_stake \ - --args address: u64: - ``` - -* Withdraw `amount` of owned inactive stake from the delegation pool at `pool_address` using the public entry method ` withdraw(delegator: &signer, pool_address: address, amount: u64)` and the command: - - ```bash - aptos move run --profile delegator \ - --function-id 0x1::delegation_pool::withdraw \ - --args address: u64: - ``` - - -## Perform pool owner operations - -Delegation pool owners have access to specific methods designed for modifying the `operator` and `voter` roles of the delegation pool. 
Use the following Aptos CLI commands and include the relevant addresses: - -* Set the operator address for the delegation pool: - - ```bash - aptos move run --profile delegation_pool_owner \ - --function-id 0x1::delegation_pool::set_operator \ - --args address: - ``` - - -## Check delegation pool information - -Until the delegation pool has received 1 million APT and the validator has been added to the set of active validators, there will be no rewards to track during each cycle. In order to obtain information about a delegation pool, use the Aptos [View function](../../../apis/fullnode-rest-api.md#reading-state-with-the-view-function). - -* `get_owned_pool_address(owner: address): address` - Returns the address of the delegation pool belonging to the owner, or produces an error if there is no delegation pool associated with the owner. - -* `delegation_pool_exists(addr: address): bool` - Returns true if a delegation pool exists at the provided address `addr`. - -* `operator_commission_percentage(pool_address: address): u64` - Returns the operator commission percentage set on the delegation pool at initialization. - -* `get_stake(pool_address: address, delegator_address: address): (u64, u64, u64)` - Returns total stake owned by `delegator_address` within delegation pool `pool_address` in each of its individual states: (`active`,`inactive`,`pending_inactive`). - -* `get_delegation_pool_stake(pool_address: address): (u64, u64, u64, u64)` - Returns the stake amounts on `pool_address` in the different states: (`active`,`inactive`,`pending_active`,`pending_inactive`). - -* `shareholders_count_active_pool(pool_address: address): u64` - Returns the number of delegators owning an active stake within `pool_address`. - -* `get_pending_withdrawal(pool_address: address, delegator_address: address): (bool, u64)` - Returns if the specified delegator possesses any withdrawable stake. 
However, if the delegator has recently initiated a request to release some of their stake and the stake pool's lockup cycle has not ended yet, then their funds may not yet be available for withdrawal. - -* `can_withdraw_pending_inactive(pool_address: address): bool` - Returns whether `pending_inactive` stake can be directly withdrawn from the delegation pool, implicitly its stake pool, in the special case the validator had gone inactive before its lockup expired. - - -In the [Aptos TypeScript SDK](../../../sdks/ts-sdk/index.md), a View function request would resemble: - -```ts -import {Aptos, AptosConfig} from "@aptos-labs/ts-sdk"; - -const NODE_URL = "https://aptos-testnet.public.blastapi.io"; - -(async () => { - const aptosConfig = new AptosConfig({fullnode:NODE_URL}) - const aptos = new Aptos(aptosConfig); - const payload: InputViewRequestData = { - function: "0x1::delagation_pool::get_stake", - functionArguments: ["pool_address", "delegator_address"], - }; - console.log(await aptos.view({payload})); -})(); - -``` -Alternatively, you can use Aptos CLI to call View functions. - -```bash - aptos move view [OPTIONS] --function-id -``` - -To discover the available options and the process for making an `aptos move view` call, access the help information with `aptos move view --help`. This will display the required arguments for invoking the view functions. - - -## Compute delegation pool rewards earned -Use this formula to calculate *rewards earned* for `active` and `pending_inactive` staking. This formula assumes that different stake operations such as `unlock` and `reactivate` take out the *principals* first and then *rewards*. Therefore, *rewards earned* may vary based upon how the formula you use is constructed: - -1. 
Get the amount of `active` and `pending_inactive` staking from the [`get_stake`](https://github.com/aptos-labs/aptos-core/blob/ed63ab756cda61439287304ed89bbb156fcbeaed/aptos-move/framework/aptos-framework/sources/delegation_pool.move#L321) view function. - -2. Calculate principal: - - "active principal" = **AddStakeEvent** - **UnlockStakeEvent** + **ReactivateStakeEvent**. If at any point during the iteration, "active principal" < 0, reset to 0. Negative principal could happen when the amount users `unlock` include rewards earned from staking. - - "pending inactive principal" = **UnlockStakeEvent** - **ReactivateStakeEvent**. If at any point during the iteration, "pending inactive principal" < 0, reset to 0. Negative principal could happen when the amount users `reactivate` include rewards earned from staking. - -3. Compute rewards earned: - - active_rewards = `active` - *active principal*. - - pending_inactive_rewards = `pending_inactive` - "pending inactive principal". diff --git a/developer-docs-site/docs/nodes/validator-node/operator/index.md b/developer-docs-site/docs/nodes/validator-node/operator/index.md deleted file mode 100644 index 8afed326d3114..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/index.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: "Operator" -slug: "index" ---- - -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Operator - -If you are an operator participating in the Aptos network, then use this document to perform the operator tasks such as deploying a validator node and validator fullnode, registering the nodes on the Aptos community platform, and performing the validation. - -:::tip Both validator node and validator fullnode are required for mainnet -For participating in the Aptos mainnet, you must deploy both a validator node and a validator fullnode. 
-::: - -## Deploy the nodes and register - -**Step 1:** Read the [**Node Requirements**](./node-requirements.md) and make sure that your hardware, storage and network resources satisfy the node requirements. - -**Step 2:** **Deploy the nodes**. Follow the detailed node installation steps provided in [**Running Validator Node**](running-validator-node/index.md) and deploy a validator node and a validator fullnode. - -Note that your nodes will not be running correctly (not syncing, not participating in consensus), until they're added to the validator set via [staking pool operations](./shutting-down-nodes.md) (below). - -## Connect to Aptos network - -After deploying your nodes, [connect to the Aptos Network](./connect-to-aptos-network.md). - -## Set up staking and delegation pool operations - -After connecting your nodes to the Aptos network, establish [staking pool operations](./staking-pool-operations.md) to add your node to the validator set. - -Similarly, conduct [delegation pool operations](./delegation-pool-operations.md) for APT delegated to your validator. Your node will start syncing and participating in consensus. - -## Ensure your nodes are live - -After your nodes are deployed and configure, make sure they meet [node liveness criteria](./node-liveness-criteria.md). diff --git a/developer-docs-site/docs/nodes/validator-node/operator/node-liveness-criteria.md b/developer-docs-site/docs/nodes/validator-node/operator/node-liveness-criteria.md deleted file mode 100644 index ee3501ed50a0f..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/node-liveness-criteria.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: "Node Liveness Criteria" -slug: "node-liveness-criteria" ---- - -# Node Liveness Criteria - -When you participate in the Aptos network, your validator node and the validator fullnode must pass liveness checks within 24 hours of being selected to participate in the network, and at a regular cadence onwards. 
This is required to ensure that your nodes contribute to the health of the overall network. - -This document describes how you can verify the status of your deployed validator node in the Aptos network to meet our success criteria. Once your nodes are up and running, you can also [measure their success](../../measure/index.md) using the [Node Health Checker](../../measure/node-health-checker.md) and [Node Inspection Service](../../measure/node-inspection-service.md). - -The liveness of your validator node will be evaluated using both on-chain and off-chain data. On-chain data will be pulled directly from your validator node syncing to the chain, and off-chain data will be received from your validator node via telemetry. Such data includes: - -- At least one proposed block per hour. This data will be used to determine your node’s availability over time. -- Telemetry data pushed by your validator node: - - A continuously increasing synced version of your node, alongside a reasonable delta from the highest state of the blockchain. - - Aptos Labs' validator is among your set of peers. - -## Verifying the liveness of your node - -### Monitoring tools - -If you are a node operator, then several tools are available to you (provided by the Aptos team and the community) to verify the status of your own node locally. This local status will act as a good proxy for overall node health as seen from the network level and as reported by the remote analytics system operated by Aptos Labs. - -- Locally, the best way to verify your node status is to interact with your node. You can monitor your local metrics endpoint by running a `curl` command and observe various key metrics. Follow the steps described in detail in the [Verify initial synchronization](../../full-node/fullnode-source-code-or-docker.md#verify-the-correctness-of-your-fullnode) document. 
- -- To make your validator node more observable, install monitoring tools that scrape the local metrics endpoint: - - For Kubernetes based deployments, install the monitoring Helm chart ([https://github.com/aptos-labs/aptos-core/tree/main/terraform/helm/monitoring](https://github.com/aptos-labs/aptos-core/tree/main/terraform/helm/monitoring)). - - Locally, you may run Prometheus and Grafana directly. Dashboards that utilize the metrics can be found here: ([https://github.com/aptos-labs/aptos-core/tree/main/dashboards](https://github.com/aptos-labs/aptos-core/tree/main/dashboards)). - -The above two monitoring methods rely on your node’s reported Prometheus Metrics. Of particular importance, the following metrics are directly related to the liveness success criteria above: - -- `aptos_consensus_proposals_count` -- `aptos_state_sync_version{type="synced"}` -- `aptos_connections` - -### Monitor via telemetry - -Remotely, the Aptos team can verify the state of your node via [telemetry](../../../reference/telemetry.md). When you enable telemetry on your node, the Aptos node binary will send telemetry data in the background to the Aptos team. - -Telemetry data from your node is necessary for the Aptos team to evaluate the off-chain liveness metrics for verification. You can view the exact contents of each telemetry call by checking the `DEBUG` logs on your validator. If your node is using the default config without explicitly disabling telemetry, and has HTTPS egress access to the internet, then it will report various key metrics to Aptos Labs, such as the current synced version and peers connected to your node. - -Aptos Labs will also observe the on-chain events such as proposals per hour on your node, as defined in the liveness criteria. - -Aptos Labs’ own analytics system will aggregate all the off-chain telemetry data and all on-chain participation events to calculate your node’s health. 
Node health will be displayed on the community platform site as well as on a separate validator leaderboard for each testnet. - diff --git a/developer-docs-site/docs/nodes/validator-node/operator/node-requirements.md b/developer-docs-site/docs/nodes/validator-node/operator/node-requirements.md deleted file mode 100644 index 94a33f4921800..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/node-requirements.md +++ /dev/null @@ -1,102 +0,0 @@ ---- -title: "Node Requirements" -slug: "node-requirements" ---- - -# Node Requirements - -To make your validator node and validator fullnode deployment hassle-free, make sure you have the resources specified in this document. - -## Validator and validator fullnode - -- **Both a validator node and a validator fullnode required:** For the Aptos mainnet, we require that you run a validator node and a validator fullnode. We strongly recommend that you run the validator node and the validator fullnode on two separate and independent machines. Make sure that these machines are well-provisioned and isolated from each other. Guaranteeing the resource isolation between the validator and the validator fullnode will help ensure smooth deployment of these nodes. -- **Public fullnode is optional:** We recommend that optionally you run a public fullnode also. However, a public fullnode is not required. If you run public fullnode also, then we strongly recommend that you run the public fullnode on a third machine that is separate and independent from either the validator or the validator fullnode machines. -:::tip Terraform support -For deploying the nodes in cloud we have provided Terraform support on two cloud providers: **GCP** and **AWS**. See [**Running Validator Node**](running-validator-node/index.md). -::: - -- **Open the network ports:** Make sure that you open the network ports prior to connecting to the network. See [Ports](#ports). 
-- **Close the network ports:** Make sure that you close these ports after either being accepted or rejected for the network. - -## Hardware requirements - -For running an Aptos **validator node and validator fullnode** we recommend the following hardware resources: - - - **CPU**: - - 8 cores, 16 threads - - 2.8GHz, or faster - - Intel Xeon Skylake or newer - - **Memory**: 32GB RAM. - - **Storage**: 2T SSD with at least 40K IOPS and 200MiB/s bandwidth. - - **Networking bandwidth**: 1Gbps - -### Example machine types on various clouds - -- **AWS** - - c6id.8xlarge (if using local SSD) - - c6i.8xlarge + io1/io2 EBS volume with 40K IOPS. -- **GCP** - - n2-standard-16 (if using local SSD) - - n2-standard-32 + pd-ssd with 40K IOPS. - -### Motivations for hardware requirements - -Hardware requirements depend on the transaction rate and storage demands. The amount of data stored by the Aptos blockchain depends on the ledger history (the number of transactions) of the blockchain and the number of on-chain states (e.g., accounts and resources). Ledger history and the number of on-chain states depend on several factors: the age of the blockchain, the average transaction rate, and the configuration of the ledger pruner. - -The current hardware requirements are set considering the estimated growth over the period ending in Q1-2023. Note that we cannot provide a recommendation for archival node storage size as that is an ever-growing number. - -**Local SSD vs. network storage** - -Cloud deployments require choosing between using local or network storage such as AWS EBS, GCP PD. Local SSD provides lower latency and cost, especially relative to IOPS. - -On the one hand, network storage requires additional CPU support to scale IOPS, but on the other hand, the network storage provides better support for backup snapshots and provides resilience for the nodes in scenarios where the instance is stopped. Network storage makes it easier to support storage needs for high availability. 
- -## Ports - -When you are running a validator node, you are required to open network ports on your node to allow other nodes to connect to you. For fullnodes this is optional. - -### Network types - -Your node can be configured so that each of these networks can connect using a different port on your node. - -There are three types of Aptos networks: -1. **Validator network:** A validator node connects to this network. -2. **Public network:** A public fullnode connects to this network. -3. **Validator fullnode network:** A validator fullnode (VFN) connects to this network. The VFN network allows the validator fullnode to connect to a specific validator. - -You can configure the port settings on your node using the configuration YAML file. See the [example configuration YAML here](https://github.com/aptos-labs/aptos-core/blob/4ce85456853c7b19b0a751fb645abd2971cc4c0c/docker/compose/aptos-node/fullnode.yaml#L10-L9). With this configuration YAML on your node, the public network connects to your node on port 6182 and the VFN network on 6181. Because these port settings are configurable, we don't explicitly say port X is for network Y. - -### Port settings - -:::tip Default port settings -The recommendations described below assume the default port settings used by validators, validator fullnodes and public fullnodes. **We recommend that you do not expose any other ports while operating a node.** If you have changed the default port settings, then you should adjust the recommendations accordingly. -::: - -#### For the validator: - -- Open the following TCP ports: - - `6180` – Open publicly to enable the validator to connect to other validators in the network. - - `6181` – Open privately to only be accessible by your validator fullnode. 
-- Close the following TCP ports: - - `6182` – To prevent public fullnode connections - - `9101` – To prevent unauthorized metric inspection - - `80/8080` – To prevent unauthorized REST API access - -#### For the validator fullnode: - -- Open the following TCP ports: - - `6182` – Open publicly to enable public fullnodes to connect to your validator fullnode. - - `6181` – Open privately to only be accessible by your validator. -- Close the following TCP ports: - - `9101` – To prevent unauthorized metric inspection - - `80/8080` – To prevent unauthorized REST API access - -#### For a public fullnode: -- Open the TCP port `6182` publicly to enable other public fullnodes to connect to your node. -- Close the following TCP ports: - - `9101` – To prevent unauthorized metric inspection - - `80/8080` – To prevent unauthorized REST API access - -:::caution Exposing services -We note that the inspection port (`9101`) and the REST API port (`80` or `8080`) are likely useful for your internal network, e.g., application development and debugging. However, the inspection port should never be exposed publicly as it can be easily abused. Similarly, if you choose to expose the REST API endpoint publicly, you should deploy an additional authentication or rate-limiting mechanism to prevent abuse. -::: diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/index.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/index.md deleted file mode 100755 index b4dbed1a2474a..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/index.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: "Running Validator Node" -slug: "running-validator-node" ---- - -# Running Validator Node - -:::tip Deploying a validator node? Read this first -If you are deploying a validator node, then make sure to read the [Node Requirements](../node-requirements.md) first. 
-::: - -## Install Validator node - -### Deploy - -The following guides provide step-by-step instructions for running public fullnode, validator node, and validator fullnode for the Aptos blockchain. - -- ### [On AWS](./using-aws.md) -- ### [On Azure](./using-azure.md) -- ### [On GCP](./using-gcp.md) -- ### [Using Docker](./using-docker.md) -- ### [Using Aptos Source](./using-source-code.md) - -### Configure Validator node - -### Connect to Aptos network - -After deploying your nodes, [connect to the Aptos Network](../connect-to-aptos-network.md). - -## Set up staking and delegation pool operations - -After connecting your nodes to the Aptos network, establish [staking pool operations](../staking-pool-operations.md) to add your node to the validator set. - -Similarly, conduct [delegation pool operations](../delegation-pool-operations.md) for APT delegated to your validator. Your node will start syncing and participating in consensus. - -## Test Validator node - -After your nodes are deployed and configured, make sure they meet [node liveness criteria](../node-liveness-criteria.md). - -## Install Validator fullnode - -Note that many of the same instructions can be used to run a validator fullnode in Aptos: - -- If you use the provided reference Kubernetes deployments (i.e. for cloud-managed kubernetes on AWS, Azure, or GCP), then one validator node and one validator fullnode are deployed by default. -- When using the Docker or the source code, the `fullnode.yaml` will enable you to run a validator fullnode. - - See [Step 11](./using-docker.md#docker-vfn) in the Docker-based instructions. - - Similarly, if you use source code, see [Step 9](./using-source-code.md#source-code-vfn) in the source code instructions. 
diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-aws.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-aws.md deleted file mode 100644 index dc55d9e94be79..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-aws.md +++ /dev/null @@ -1,216 +0,0 @@ ---- -title: "On AWS" -slug: "run-validator-node-using-aws" ---- - -# On AWS - -This is a step-by-step guide to install an Aptos node on Amazon Web Services (AWS). Follow these steps to configure a validator node and a validator fullnode on separate machines. - -:::caution Did you set up your AWS account? -This guide assumes that you already have AWS account setup. -::: - -:::danger Do you have stale volumes after bumping your deployment's era? -`era` is a concept relevant only to Kubernetes deployments of an Aptos node. Changing the `era` provides an easy way to wipe your deployment's state. However, this may lead to dangling persistent volumes on validator fullnodes. Confirm the existence of these volumes with `kubectl get pvc` and delete them manually to minimize costs. -::: - -## Before you proceed - -Make sure you complete these prerequisite steps before you proceed: - -1. Set up your AWS account. -2. Make sure the following are installed on your local computer: - - **Aptos CLI**: https://aptos.dev/tools/aptos-cli/install-cli/index - - **Terraform 1.3.6**: https://www.terraform.io/downloads.html - - **Kubernetes CLI**: https://kubernetes.io/docs/tasks/tools/ - - **AWS CLI**: https://aws.amazon.com/cli/ - -## Install - -:::tip One validator node + one validator fullnode -Follow the below instructions **twice**, i.e., first on one machine to run a validator node and the second time on another machine to run a validator fullnode. -::: - -1. Create a working directory for your node configuration. 
- - * Choose a workspace name, for example, `mainnet` for mainnet, or `testnet` for testnet, and so on. **Note**: This defines the Terraform workspace name, which, in turn, is used to form the resource names. - - ```bash - export WORKSPACE=mainnet - ``` - - * Create a directory for the workspace. - - ```bash - mkdir -p ~/$WORKSPACE - ``` - - * Choose a username for your node, for example `alice`. - - ```bash - export USERNAME=alice - ``` - -2. Create an S3 storage bucket for storing the Terraform state on AWS. You can do this on the AWS UI or by the below command: - - ```bash - aws s3 mb s3:// --region - ``` - -3. Create a Terraform file called `main.tf` in your working directory: - - ```bash - cd ~/$WORKSPACE - vi main.tf - ``` - -4. Modify the `main.tf` file to configure Terraform and to create Aptos fullnode from the Terraform module. See below example content for `main.tf`: - - ``` - terraform { - required_version = "~> 1.3.6" - backend "s3" { - bucket = "terraform.aptos-node" - key = "state/aptos-node" - region = - } - } - - provider "aws" { - region = - } - - module "aptos-node" { - # Download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/aptos-node/aws?ref=mainnet" - region = # Specify the region - # zone_id = "" # zone id for Route53 if you want to use DNS - era = 1 # bump era number to wipe the chain - chain_id = 1 # for mainnet. Use different value for testnet or devnet. 
- image_tag = "mainnet" # Specify the image tag to use - validator_name = "" - } - ``` - - For full customization options, see: - - The Terraform variables file [https://github.com/aptos-labs/aptos-core/blob/main/terraform/aptos-node/aws/variables.tf](https://github.com/aptos-labs/aptos-core/blob/main/terraform/aptos-node/aws/variables.tf), and - - The values YAML file [https://github.com/aptos-labs/aptos-core/blob/main/terraform/helm/aptos-node/values.yaml](https://github.com/aptos-labs/aptos-core/blob/main/terraform/helm/aptos-node/values.yaml). - -5. Initialize Terraform in the `$WORKSPACE` directory where you created the `main.tf` file. - - ```bash - terraform init - ``` -This will download all the Terraform dependencies into the `.terraform` folder in your current working directory. - -6. Create a new Terraform workspace to isolate your environments: - - ```bash - terraform workspace new $WORKSPACE - # This command will list all workspaces - terraform workspace list - ``` - -7. Apply the configuration. - - ```bash - terraform apply - ``` - - This may take a while to finish (~20 minutes). Terraform will create all the resources on your AWS cloud account. - -8. After `terraform apply` finishes, you can check if those resources are created: - - - `aws eks update-kubeconfig --name aptos-$WORKSPACE`: To configure access for your k8s cluster. - - `kubectl get pods`: This should have haproxy, validator and fullnode, with validator and fullnode pod `pending` (require further action in later steps). - - `kubectl get svc`: This should have `validator-lb` and `fullnode-lb`, with an external IP you can share later for connectivity. - -9. 
Get your node IP information into your environment: - - ```bash - export VALIDATOR_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-validator-lb --output jsonpath='{.status.loadBalancer.ingress[0].hostname}')" - - export FULLNODE_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-fullnode-lb --output jsonpath='{.status.loadBalancer.ingress[0].hostname}')" - ``` - -10. Generate the key pairs (node owner, voter, operator key, consensus key and networking key) in your working directory. - - ```bash - aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys - ``` - - This will create 4 key files under `~/$WORKSPACE/keys` directory: - - `public-keys.yaml` - - `private-keys.yaml` - - `validator-identity.yaml`, and - - `validator-full-node-identity.yaml`. - - :::danger IMPORTANT - - Backup your `private-keys.yaml` somewhere safe. These keys are important for you to establish ownership of your node. **Never share private keys with anyone.** - ::: - -11. Configure the validator information. - - ```bash - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host $VALIDATOR_ADDRESS:6180 \ - --full-node-host $FULLNODE_ADDRESS:6182 \ - --stake-amount 100000000000000 - - ``` - - This will create two YAML files in the `~/$WORKSPACE/$USERNAME` directory: `owner.yaml` and `operator.yaml`. - -12. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `genesis.blob` - - `waypoint.txt` - -13. **Summary:** To summarize, in your working directory you should have a list of files: - - `main.tf`: The Terraform files to install the `aptos-node` module (from steps 3 and 4). - - `keys` folder containing: - - `public-keys.yaml`: Public keys for the owner account, consensus, networking (from step 10). 
- - `private-keys.yaml`: Private keys for the owner account, consensus, networking (from step 10). - - `validator-identity.yaml`: Private keys for setting the Validator identity (from step 10). - - `validator-full-node-identity.yaml`: Private keys for setting validator full node identity (from step 10). - - `username` folder containing: - - `owner.yaml`: Defines owner, operator, and voter mapping. - - `operator.yaml`: Node information that will be used for both the validator and the validator fullnode (from step 11). - - `waypoint.txt`: The waypoint for the genesis transaction (from step 12). - - `genesis.blob` The genesis binary that contains all the information about the framework, validator set, and more (from step 12). - -14. Insert `genesis.blob`, `waypoint.txt` and the identity files as secret into k8s cluster. - - ```bash - kubectl create secret generic ${WORKSPACE}-aptos-node-0-genesis-e1 \ - --from-file=genesis.blob=genesis.blob \ - --from-file=waypoint.txt=waypoint.txt \ - --from-file=validator-identity.yaml=keys/validator-identity.yaml \ - --from-file=validator-full-node-identity.yaml=keys/validator-full-node-identity.yaml - ``` - - :::tip - - The `-e1` suffix refers to the era number. If you changed the era number, make sure it matches when creating the secret. - - ::: - - -15. Check that all the pods are running. - - ```bash - kubectl get pods - - NAME READY STATUS RESTARTS AGE - node1-aptos-node-0-fullnode-e9-0 1/1 Running 0 4h31m - node1-aptos-node-0-haproxy-7cc4c5f74c-l4l6n 1/1 Running 0 4h40m - node1-aptos-node-0-validator-0 1/1 Running 0 4h30m - ``` - -You have successfully completed setting up your node. Make sure that you have set up one machine to run a validator node and a second machine to run a validator fullnode. - -Now proceed to [connecting to the Aptos network](../connect-to-aptos-network.md) and [establishing staking pool operations](../staking-pool-operations.md). 
diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-azure.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-azure.md deleted file mode 100644 index da589879eccc1..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-azure.md +++ /dev/null @@ -1,204 +0,0 @@ ---- -title: "On Azure" -slug: "run-validator-node-using-azure" ---- - -# On Azure - -This is a step-by-step guide to install an Aptos node on Microsoft Azure. Follow these steps to configure a validator node and a validator fullnode on separate machines. - -:::caution Did you set up your Azure account? -This guide assumes that you already have Azure account setup. -::: - -:::danger Do you have stale volumes after bumping your deployment's era? -`era` is a concept relevant only to Kubernetes deployments of an Aptos node. Changing the `era` provides an easy way to wipe your deployment's state. However, this may lead to dangling persistent volumes on validator fullnodes. Confirm the existence of these volumes with `kubectl get pvc` and delete them manually to minimize costs. -::: - -## Before you proceed - -Make sure you complete these prerequisite steps before you proceed: - -- **Azure account**: https://azure.microsoft.com/ -- **Aptos CLI**: https://aptos.dev/tools/aptos-cli/install-cli/index -- **Terraform 1.3.6**: https://www.terraform.io/downloads.html -- **Kubernetes CLI**: https://kubernetes.io/docs/tasks/tools/ -- **Azure CLI**: https://docs.microsoft.com/en-us/cli/azure/install-azure-cli - -## Install - -:::tip One validator node + one validator fullnode -Follow the below instructions **twice**, i.e., first on one machine to run a validator node and the second time on another machine to run a validator fullnode. -::: - -1. Create a working directory for your configuration. 
- - * Choose a workspace name, for example, `mainnet` for mainnet, or `testnet` for testnet, and so on. **Note**: This defines the Terraform workspace name, which, in turn, is used to form the resource names. - - ```bash - export WORKSPACE=mainnet - ``` - - * Create a directory for the workspace. - - ```bash - mkdir -p ~/$WORKSPACE - ``` - - * Choose a username for your node, for example `alice`. - - ```bash - export USERNAME=alice - ``` - -2. Create a blob storage container for storing the Terraform state on Azure, you can do this on Azure UI or by the command: - - ```bash - az group create -l -n aptos-$WORKSPACE - az storage account create -n -g aptos-$WORKSPACE -l --sku Standard_LRS - az storage container create -n --account-name --resource-group aptos-$WORKSPACE - ``` - -3. Create Terraform file called `main.tf` in your working directory: - ```bash - cd ~/$WORKSPACE - vi main.tf - ``` - -4. Modify `main.tf` file to configure Terraform, and create fullnode from Terraform module. Example content for `main.tf`: - - ``` - terraform { - required_version = "~> 1.3.6" - backend "azurerm" { - resource_group_name = - storage_account_name = - container_name = - key = "state/validator" - } - } - module "aptos-node" { - # download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/aptos-node/azure?ref=mainnet" - region = # Specify the region - era = 1 # bump era number to wipe the chain - chain_id = 1 # for mainnet. Use different value for testnet or devnet. - image_tag = "mainnet" # Specify the docker image tag to use - validator_name = "" - } - ``` - - For the full customization options, see the variables file [`variables.tf`](https://github.com/aptos-labs/aptos-core/blob/main/terraform/aptos-node/azure/variables.tf), and the [Helm values](https://github.com/aptos-labs/aptos-core/blob/main/terraform/helm/aptos-node/values.yaml). - -5. Initialize Terraform in the same directory of your `main.tf` file. 
- ```bash - terraform init - ``` -This will download all the Terraform dependencies for you, in the `.terraform` folder in your current working directory. - -6. Create a new Terraform workspace to isolate your environments: - ```bash - terraform workspace new $WORKSPACE - # This command will list all workspaces - terraform workspace list - ``` - -7. Apply the configuration. - ```bash - terraform apply - ``` - This might take a while to finish (~20 minutes), Terraform will create all the resources on your cloud account. - -8. Once terraform apply finishes, you can check if those resources are created: - - - `az aks get-credentials --resource-group aptos-$WORKSPACE --name aptos-$WORKSPACE` to configure access for your k8s cluster. - - `kubectl get pods` this should have haproxy, validator and fullnode. with validator and fullnode pod `pending` (require further action in later steps) - - `kubectl get svc` this should have `validator-lb` and `fullnode-lb`, with an external-IP you can share later for connectivity. - -9. Get your node IP info: - - ```bash - export VALIDATOR_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-validator-lb --output jsonpath='{.status.loadBalancer.ingress[0].hostname}')" - - export FULLNODE_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-fullnode-lb --output jsonpath='{.status.loadBalancer.ingress[0].hostname}')" - ``` - -10. Generate the key pairs (node owner, voter, operator key, consensus key and networking key) in your working directory. - - ```bash - aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys - ``` - - This will create 4 key files under `~/$WORKSPACE/keys` directory: - - `public-keys.yaml` - - `private-keys.yaml` - - `validator-identity.yaml`, and - - `validator-full-node-identity.yaml`. - - :::danger IMPORTANT - - Backup your `private-keys.yaml` somewhere safe. These keys are important for you to establish ownership of your node. **Never share private keys with anyone.** - ::: - -11. 
Configure the validator information. - - ```bash - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host $VALIDATOR_ADDRESS:6180 \ - --full-node-host $FULLNODE_ADDRESS:6182 \ - --stake-amount 100000000000000 - - ``` - - This will create two YAML files in the `~/$WORKSPACE/$USERNAME` directory: `owner.yaml` and `operator.yaml`. - -12. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `genesis.blob` - - `waypoint.txt` - -13. **Summary:** To summarize, in your working directory you should have a list of files: - - `main.tf`: The Terraform files to install the `aptos-node` module (from steps 3 and 4). - - `keys` folder containing: - - `public-keys.yaml`: Public keys for the owner account, consensus, networking (from step 10). - - `private-keys.yaml`: Private keys for the owner account, consensus, networking (from step 10). - - `validator-identity.yaml`: Private keys for setting the Validator identity (from step 10). - - `validator-full-node-identity.yaml`: Private keys for setting validator full node identity (from step 10). - - `username` folder containing: - - `owner.yaml`: Defines owner, operator, and voter mapping. They are all the same account in test mode (from step 11). - - `operator.yaml`: Node information that will be used for both the Validator and the fullnode (from step 11). - - `waypoint.txt`: The waypoint for the genesis transaction (from step 12). - - `genesis.blob` The genesis binary that contains all the information about the framework, validatorSet and more (from step 12). - -14. Insert `genesis.blob`, `waypoint.txt` and the identity files as secret into k8s cluster. 
- - ```bash - kubectl create secret generic ${WORKSPACE}-aptos-node-0-genesis-e1 \ - --from-file=genesis.blob=genesis.blob \ - --from-file=waypoint.txt=waypoint.txt \ - --from-file=validator-identity.yaml=keys/validator-identity.yaml \ - --from-file=validator-full-node-identity.yaml=keys/validator-full-node-identity.yaml - ``` - - :::tip - - The `-e1` suffix refers to the era number. If you changed the era number, make sure it matches when creating the secret. - - ::: - -15. Check that all pods are running. - - ```bash - kubectl get pods - - NAME READY STATUS RESTARTS AGE - node1-aptos-node-0-fullnode-e9-0 1/1 Running 0 4h31m - node1-aptos-node-0-haproxy-7cc4c5f74c-l4l6n 1/1 Running 0 4h40m - node1-aptos-node-0-validator-0 1/1 Running 0 4h30m - ``` - -You have successfully completed setting up your node. Make sure that you have set up one machine to run a validator node and a second machine to run a validator fullnode. - -Now proceed to [connecting to the Aptos network](../connect-to-aptos-network.md) and [establishing staking pool operations](../staking-pool-operations.md). diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-docker.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-docker.md deleted file mode 100644 index 75b9482b7d868..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-docker.md +++ /dev/null @@ -1,124 +0,0 @@ ---- -title: "Using Docker" -slug: "run-validator-node-using-docker" ---- - -# Using Docker - -This is a step-by-step guide to install an Aptos node using Docker. Follow these steps to configure a validator node and a validator fullnode on separate machines. Use the `fullnode.yaml` to run a validator fullnode. See [Step 11](#docker-vfn). 
- -## Before you proceed - -Make sure the following are installed on your local computer: - - **Aptos CLI**: https://aptos.dev/tools/aptos-cli/install-cli/index - - **Docker and Docker-compose:** https://docs.docker.com/engine/install/ - -:::caution Note on Apple M1 - -Docker method has only been tested on Linux, Windows, and Intel macOS. If you are on M1 macOS, use the [Aptos-core source approach](./using-source-code.md). - -::: - -1. Create a directory for your Aptos node composition, and pick a username for your node. e.g. - ```bash - export WORKSPACE=mainnet - export USERNAME=alice - mkdir ~/$WORKSPACE - cd ~/$WORKSPACE - ``` - -2. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `validator.yaml` - - `docker-compose.yaml` - - `docker-compose-fullnode.yaml` - - `haproxy.cfg` - - `haproxy-fullnode.cfg` - - `blocked.ips` - -3. Generate the key pairs (node owner, voter, operator key, consensus key and networking key) in your working directory. - - ```bash - aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys - ``` - - This will create 4 key files under `~/$WORKSPACE/keys` directory: - - `public-keys.yaml` - - `private-keys.yaml` - - `validator-identity.yaml`, and - - `validator-full-node-identity.yaml`. - - :::danger IMPORTANT - - Backup your `private-keys.yaml` somewhere safe. These keys are important for you to establish ownership of your node. **Never share private keys with anyone.** - ::: - -4. Configure validator information. You need to setup a static IP / DNS address (DNS is much preferred) which can be used by the node, and make sure the network / firewalls are properly configured to accept external connections. See [Network Identity For Fullnode](../../../full-node/network-identity-fullnode.md) for how to do this. 
- - ```bash - cd ~/$WORKSPACE - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host : \ - --full-node-host : \ - --stake-amount 100000000000000 - - # for example, with IP: - - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host 35.232.235.205:6180 \ - --full-node-host 34.135.169.144:6182 \ - --stake-amount 100000000000000 - - # For example, with DNS: - - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host bot.aptosdev.com:6180 \ - --full-node-host fn.bot.aptosdev.com:6182 \ - --stake-amount 100000000000000 - ``` - - This will create two YAML files in the `~/$WORKSPACE/$USERNAME` directory: `owner.yaml` and `operator.yaml`. - -5. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `genesis.blob` - - `waypoint.txt` - -6. To recap, in your working directory, you should have a list of files: - - - `docker-compose.yaml` docker compose file to run validator and fullnode - - `keys` folder containing: - - `public-keys.yaml`: Public keys for the owner account, consensus, networking (from step 4). - - `private-keys.yaml`: Private keys for the owner account, consensus, networking (from step 4). - - `validator-identity.yaml`: Private keys for setting the Validator identity (from step 4). - - `validator-full-node-identity.yaml`: Private keys for setting validator full node identity (from step 4). - - `username` folder containing: - - `owner.yaml`: define owner, operator, and voter mapping. They are all the same account in test mode (from step 5). 
- - `operator.yaml`: Node information that will be used for both the Validator and the fullnode (from step 5). - - `waypoint.txt`: The waypoint for the genesis transaction (from step 6). - - `genesis.blob` The genesis binary that contains all the information about the framework, validatorSet and more (from step 6). - -7. Run docker-compose: `docker-compose up`. (or `docker compose up` depends on your version) - -**Now you have completed setting up your validator node. Next, setup a validator fullnode following the instructions below.** - -9. Set up a validator fullnode on a different machine. Download the `fullnode.yaml` and `docker-compose-fullnode.yaml` configuration files into the working directory of fullnode machine. See [Node Files](../../../node-files-all-networks/node-files.md) for a full list of files you should download and the download commands. - -10. Edit `fullnode.yaml` file to update the IP address for validator node. - -11. Copy the `validator-full-node-identity.yaml`, download `genesis.blob` and `waypoint.txt` files into the same working directory on fullnode machine. - -12. Run docker-compose: `docker-compose -f docker-compose-fullnode.yaml up`. - -Now you have successfully completed setting up your node. - -Now proceed to [connecting to the Aptos network](../connect-to-aptos-network.md) and [establishing staking pool operations](../staking-pool-operations.md). - - diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-gcp.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-gcp.md deleted file mode 100644 index d6b475067ae96..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-gcp.md +++ /dev/null @@ -1,203 +0,0 @@ ---- -title: "On GCP" -slug: "run-validator-node-using-gcp" ---- - -# On GCP - -This is a step-by-step guide to install an Aptos node on Google GCP. 
Follow these steps to configure a validator node and a validator fullnode on separate machines. - -:::caution Did you set up your GCP account and create a project? -This guide assumes you already have a Google Cloud Platform (GCP) account setup, and have created a new project for deploying Aptos node. If you are not familiar with GCP (Google Cloud Platform), review the [Prerequisites](../../../full-node/run-a-fullnode-on-gcp#prerequisites) section for GCP account setup. -::: - -:::danger Do you have stale volumes after bumping your deployment's era? -`era` is a concept relevant only to Kubernetes deployments of an Aptos node. Changing the `era` provides an easy way to wipe your deployment's state. However, this may lead to dangling persistent volumes on validator fullnodes. Confirm the existence of these volumes with `kubectl get pvc` and delete them manually to minimize costs. -::: - -## Before you proceed - -Make sure the following are setup for your environment: - - **GCP account**: hhttps://cloud.google.com/ - - **Aptos CLI**: https://aptos.dev/tools/aptos-cli/install-cli/ - - **Terraform 1.3.6**: https://www.terraform.io/downloads.html - - **Kubernetes CLI**: https://kubernetes.io/docs/tasks/tools/ - - **Google Cloud CLI**: https://cloud.google.com/sdk/docs/install-sdk - -## Install - -:::tip One validator node + one validator fullnode -Follow the below instructions **twice**, i.e., first on one machine to run a validator node and the second time on another machine to run a validator fullnode. -::: - -1. Create a working directory for your configuration. - - * Choose a workspace name, for example, `mainnet` for mainnet, or `testnet` for testnet, and so on. **Note**: This defines the Terraform workspace name, which, in turn, is used to form the resource names. - ```bash - export WORKSPACE=mainnet - ``` - - * Create a directory for the workspace - ```bash - mkdir -p ~/$WORKSPACE - ``` - - * Choose a username for your node, for example `alice`. 
- - ```bash - export USERNAME=alice - ``` - -2. Create a storage bucket for storing the Terraform state on Google Cloud Storage. Use the GCP UI or Google Cloud Storage command to create the bucket. The name of the bucket must be unique. See the Google Cloud Storage documentation here: https://cloud.google.com/storage/docs/creating-buckets#prereq-cli. - - ```bash - gsutil mb gs://BUCKET_NAME - # for example - gsutil mb gs://-aptos-terraform-dev - ``` - -3. Create Terraform file called `main.tf` in your working directory: - ```bash - cd ~/$WORKSPACE - touch main.tf - ``` - -4. Modify `main.tf` file to configure Terraform, and create fullnode from Terraform module. Example content for `main.tf`: - ``` - terraform { - required_version = "~> 1.3.6" - backend "gcs" { - bucket = "BUCKET_NAME" # bucket name created in step 2 - prefix = "state/aptos-node" - } - } - - module "aptos-node" { - # download Terraform module from aptos-labs/aptos-core repo - source = "github.com/aptos-labs/aptos-core.git//terraform/aptos-node/gcp?ref=mainnet" - region = "us-central1" # Specify the region - zone = "c" # Specify the zone suffix - project = "" # Specify your GCP project ID - era = 1 # bump era number to wipe the chain - chain_id = 1 # for mainnet. Use different value for testnet or devnet. - image_tag = "mainnet" # Specify the docker image tag to use - validator_name = "" - } - ``` - - For the full customization options, see the variables file [`variables.tf`](https://github.com/aptos-labs/aptos-core/blob/main/terraform/aptos-node/gcp/variables.tf), and the [helm values](https://github.com/aptos-labs/aptos-core/blob/main/terraform/helm/aptos-node/values.yaml). - -5. Initialize Terraform in the same directory of your `main.tf` file - ```bash - terraform init - ``` -This will download all the Terraform dependencies for you, in the `.terraform` folder in your current working directory. - -6. 
Create a new Terraform workspace to isolate your environments: - ```bash - terraform workspace new $WORKSPACE - # This command will list all workspaces - terraform workspace list - ``` - -7. Apply the configuration. - - ```bash - terraform apply - ``` - - This might take a while to finish (10 - 20 minutes), Terraform will create all the resources on your cloud account. - -8. Once Terraform apply finishes, you can check if those resources are created: - - - `gcloud container clusters get-credentials aptos-$WORKSPACE --zone --project ` to configure the access for k8s cluster. - - `kubectl get pods` this should have haproxy, validator and fullnode. with validator and fullnode pod `pending` (require further action in later steps) - - `kubectl get svc` this should have `validator-lb` and `fullnode-lb`, with an external-IP you can share later for connectivity. - -9. Get your node IP info: - - ```bash - export VALIDATOR_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-validator-lb --output jsonpath='{.status.loadBalancer.ingress[0].ip}')" - - export FULLNODE_ADDRESS="$(kubectl get svc ${WORKSPACE}-aptos-node-0-fullnode-lb --output jsonpath='{.status.loadBalancer.ingress[0].ip}')" - ``` - -10. Generate the key pairs (node owner, voter, operator key, consensus key and networking key) in your working directory. - - ```bash - aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys - ``` - - This will create 4 key files under `~/$WORKSPACE/keys` directory: - - `public-keys.yaml` - - `private-keys.yaml` - - `validator-identity.yaml`, and - - `validator-full-node-identity.yaml`. - - :::danger IMPORTANT - - Backup your `private-keys.yaml` somewhere safe. These keys are important for you to establish ownership of your node. **Never share private keys with anyone.** - ::: - -11. Configure the validator information. 
- - ```bash - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host $VALIDATOR_ADDRESS:6180 \ - --full-node-host $FULLNODE_ADDRESS:6182 \ - --stake-amount 100000000000000 - - ``` - - This will create two YAML files in the `~/$WORKSPACE/$USERNAME` directory: `owner.yaml` and `operator.yaml`. - -12. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `genesis.blob` - - `waypoint.txt` - -13. To summarize, in your working directory you should have a list of files: - - `main.tf`: The Terraform files to install the `aptos-node` module (from steps 3 and 4). - - `keys` folder, which includes: - - `public-keys.yaml`: Public keys for the owner account, consensus, networking (from step 10). - - `private-keys.yaml`: Private keys for the owner account, consensus, networking (from step 10). - - `validator-identity.yaml`: Private keys for setting the Validator identity (from step 10). - - `validator-full-node-identity.yaml`: Private keys for setting validator full node identity (from step 10). - - `username` folder, which includes: - - `owner.yaml`: define owner, operator, and voter mapping. They are all the same account in test mode (from step 11). - - `operator.yaml`: Node information that will be used for both the Validator and the fullnode (from step 11). - - `waypoint.txt`: The waypoint for the genesis transaction (from step 12). - - `genesis.blob` The genesis binary that contains all the information about the framework, validatorSet and more (from step 12). - -14. Insert `genesis.blob`, `waypoint.txt` and the identity files as secret into k8s cluster. 
- - ```bash - kubectl create secret generic ${WORKSPACE}-aptos-node-0-genesis-e1 \ - --from-file=genesis.blob=genesis.blob \ - --from-file=waypoint.txt=waypoint.txt \ - --from-file=validator-identity.yaml=keys/validator-identity.yaml \ - --from-file=validator-full-node-identity.yaml=keys/validator-full-node-identity.yaml - ``` - - :::tip - - The `-e1` suffix refers to the era number. If you changed the era number, make sure it matches when creating the secret. - - ::: - -15. Check that all pods are running. - - ```bash - kubectl get pods - - NAME READY STATUS RESTARTS AGE - node1-aptos-node-0-fullnode-e9-0 1/1 Running 0 4h31m - node1-aptos-node-0-haproxy-7cc4c5f74c-l4l6n 1/1 Running 0 4h40m - node1-aptos-node-0-validator-0 1/1 Running 0 4h30m - ``` - -You have successfully completed setting up your node. Make sure that you have set up one machine to run a validator node and a second machine to run a validator fullnode. - -Now proceed to [connecting to the Aptos network](../connect-to-aptos-network.md) and [establishing staking pool operations](../staking-pool-operations.md). diff --git a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-source-code.md b/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-source-code.md deleted file mode 100644 index 5e75ca4d7abef..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/running-validator-node/using-source-code.md +++ /dev/null @@ -1,166 +0,0 @@ ---- -title: "Using Source Code" -slug: "run-validator-node-using-source" ---- - -# Using Aptos-core source code - -This is a step-by-step guide to install an Aptos node using source code. Follow these steps to configure a validator node and a validator fullnode on separate machines. Use the `fullnode.yaml` to run a validator fullnode—see Step 12. 
- -## Before you proceed - -Make sure the following are installed on your local computer: - - **Aptos CLI**: https://aptos.dev/tools/aptos-cli/install-cli/index - -## Install - -:::tip One validator node + one validator fullnode -Follow the below instructions **twice**, i.e., first on one machine to run a validator node and the second time on another machine to run a validator fullnode. -::: - -1. Follow steps in [Building Aptos From Source](../../../../guides/building-from-source.md) - -5. Checkout the `mainnet` branch using `git checkout --track origin/mainnet`. - -6. Create a directory for your Aptos node composition, and pick a username for your node. e.g. - ```bash - export WORKSPACE=mainnet - export USERNAME=alice - mkdir ~/$WORKSPACE - ``` - -7. Generate the key pairs (node owner, voter, operator key, consensus key and networking key) in your working directory. - - ```bash - aptos genesis generate-keys --output-dir ~/$WORKSPACE/keys - ``` - - This will create 4 key files under `~/$WORKSPACE/keys` directory: - - `public-keys.yaml` - - `private-keys.yaml` - - `validator-identity.yaml`, and - - `validator-full-node-identity.yaml`. - - :::danger IMPORTANT - - Backup your `private-keys.yaml` somewhere safe. These keys are important for you to establish ownership of your node. **Never share private keys with anyone.** - ::: - -8. Configure validator information. You need to setup a static IP / DNS address (DNS is much preferred) which can be used by the node, and make sure the network / firewalls are properly configured to accept external connections. 
- - ```bash - cd ~/$WORKSPACE - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host : \ - --full-node-host : \ - --stake-amount 100000000000000 - - # for example, with IP: - - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host 35.232.235.205:6180 \ - --full-node-host 34.135.169.144:6182 \ - --stake-amount 100000000000000 - - # For example, with DNS: - - aptos genesis set-validator-configuration \ - --local-repository-dir ~/$WORKSPACE \ - --username $USERNAME \ - --owner-public-identity-file ~/$WORKSPACE/keys/public-keys.yaml \ - --validator-host bot.aptosdev.com:6180 \ - --full-node-host fn.bot.aptosdev.com:6182 \ - --stake-amount 100000000000000 - ``` - - This will create two YAML files in the `~/$WORKSPACE/$USERNAME` directory: `owner.yaml` and `operator.yaml`. - -9. Download the following files by following the download commands on the [Node Files](../../../node-files-all-networks/node-files.md) page: - - `validator.yaml` - - `fullnode.yaml` - - `genesis.blob` - - `waypoint.txt` - - `haproxy.cfg` - - `haproxy-fullnode.cfg` and - - `blocked.ips` - - `docker-compose-src.yaml` - -10. Copy the `validator.yaml`, `fullnode.yaml` files into ~/$WORKSPACE/config/ directory. - ```bash - mkdir ~/$WORKSPACE/config - cp validator.yaml ~/$WORKSPACE/config/validator.yaml - cp fullnode.yaml ~/$WORKSPACE/config/fullnode.yaml - ``` - - Modify the config files to update the data directory, key path, genesis file path, waypoint path. User must have write access to data directory. - -11. 
To recap, in your working directory (`~/$WORKSPACE`), you should have a list of files: - - - `config` folder containing: - - `validator.yaml` validator config file - - `fullnode.yaml` fullnode config file - - `keys` folder containing: - - `public-keys.yaml`: Public keys for the owner account, consensus, networking (from step 7). - - `private-keys.yaml`: Private keys for the owner account, consensus, networking (from step 7). - - `validator-identity.yaml`: Private keys for setting the Validator identity (from step 7). - - `validator-full-node-identity.yaml`: Private keys for setting validator full node identity (from step 7). - - `username` folder containing: - - `owner.yaml`: Define owner, operator, and voter mapping. They are all the same account in test mode (from step 8). - - `operator.yaml`: Node information that will be used for both the Validator and the fullnode (from step 8). - - `waypoint.txt`: The waypoint for the genesis transaction (from step 9). - - `genesis.blob` The genesis binary that contains all the information about the framework, validatorSet and more (from step 9). - -12. 
Start your validator by running the below commands, with the paths assuming you are in the root of the `aptos-core` directory: - - ```bash - cargo clean - cargo build -p aptos-node --release - sudo mv target/release/aptos-node /usr/local/bin - aptos-node -f ~/$WORKSPACE/config/validator.yaml - ``` - - Run validator fullnode on **another machine**: - - ```bash - cargo clean - cargo build -p aptos-node --release - sudo mv target/release/aptos-node /usr/local/bin - aptos-node -f ~/$WORKSPACE/config/fullnode.yaml - ``` - -Optionally, you may set up `aptos-node` to run as a service controlled by `systemctl` in a file resembling: - -```bash -[Unit] -Description=Aptos Node Service - -[Service] -User=nodeuser -Group=nodeuser - -LimitNOFILE=500000 - -#Environment="RUST_LOG=error" -WorkingDirectory=/home/nodeuser/aptos-core -ExecStart=/usr/local/bin/aptos-node -f /home/nodeuser/aptos-mainnet/config/validator.yaml - -Restart=on-failure -RestartSec=3s - -StandardOutput=journal -StandardError=journal -SyslogIdentifier=aptos-node - -[Install] -WantedBy=multi-user.target -``` - -You have completed setting up your node. - -Now proceed to [connecting to the Aptos network](../connect-to-aptos-network.md) and [establishing staking pool operations](../staking-pool-operations.md). \ No newline at end of file diff --git a/developer-docs-site/docs/nodes/validator-node/operator/shutting-down-nodes.md b/developer-docs-site/docs/nodes/validator-node/operator/shutting-down-nodes.md deleted file mode 100644 index abc0e175e96b7..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/shutting-down-nodes.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -title: "Shutting Down Nodes" -slug: "shutting-down-nodes" ---- - -# Shutting Down Nodes - -Follow these instructions to shut down the validator node and validator fullnode, and cleanup the resources used by the nodes. - -## Leaving the validator set - -Before you shutdown the node, make sure to leave the validator set first. 
This will become effective in the next epoch. Also note that a node can choose to leave the validator set at any time, or it would happen automatically when there is insufficient stake in the validator account.
- diff --git a/developer-docs-site/docs/nodes/validator-node/operator/staking-pool-operations.md b/developer-docs-site/docs/nodes/validator-node/operator/staking-pool-operations.md deleted file mode 100644 index 60b3f1a8c70a0..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/staking-pool-operations.md +++ /dev/null @@ -1,295 +0,0 @@ ---- -title: "Staking Pool Operations" -slug: "staking-pool-operations" ---- - -# Staking Pool Operations - -This document describes how to perform [staking](../../../concepts/staking.md) pool operations. Note that a staking pool can only accept stake from the stake pool owner. You can stake only when you meet the minimum staking requirement. See also the related [Delegation Pool Operations](./delegation-pool-operations.md) instructions to accept stake from multiple delegators in order to reach the minimum. - -:::tip Minimum staking requirement -The current required minimum for staking is 1 million APT. -::: - -:::danger -Important There is no upgrade mechanism for the staking contract from staking pool to delegation pool. A new delegation pool would have to be created. -::: - -:::tip Testnet vs Mainnet -The below Aptos CLI command examples use mainnet. Change the `--network` value for testnet and devnet. View the values in [Aptos Blockchain Networks](../../networks.md) to see how profiles can be configured based on the network. -::: - -## Initialize a staking pool - -Before initializing a staking pool, ensure that there is an existing owner account with 1M APT. - -1. Initialize [Aptos CLI](../../../tools/aptos-cli/index.md) with a private key from an existing account, such as a wallet, or create a new account. - -```bash -aptos init --profile mainnet-owner \ - --network mainnet -``` - -You can either enter the private key from an existing wallet, or create new wallet address. - -2. 
Run the following command to initialize the staking pool: - -```bash -aptos stake create-staking-contract \ ---operator \ ---voter \ ---amount 100000000000000 \ ---commission-percentage 10 \ ---profile mainnet-owner -``` - -3. Once the staking pool has been initialized, you can proceed to [connect to the Aptos Network](./connect-to-aptos-network.md). - -## Perform pool owner operations - -### Transfer coin between accounts - -```bash -aptos account transfer \ - --account \ - --amount \ - --profile mainnet-owner -``` - -### Switch operator - -```bash -aptos stake set-operator \ - --operator-address \ - --profile mainnet-owner -``` - -### Switch voter - -```bash -aptos stake set-delegated-voter \ - --voter-address \ - --profile mainnet-owner -``` - -### Add stake - -```bash -aptos stake add-stake \ - --amount \ - --profile mainnet-owner -``` - -### Increase stake lockup - -```bash -aptos stake increase-lockup --profile mainnet-owner -``` - -### Unlock stake - -```bash -aptos stake unlock-stake \ - --amount \ - --profile mainnet-owner -``` - -### Withdraw stake - -```bash -aptos stake withdraw-stake \ - --amount \ - --profile mainnet-owner -``` - -### Update commission -```bash -aptos move run --function-id 0x1::staking_contract::update_commision \ ---args address: u64: \ ---profile mainnet-owner -``` - - -## Checking your stake pool information - -:::tip How validation works -Before you proceed, see [Validation on the Aptos blockchain](../../../concepts/staking.md#validation-on-the-aptos-blockchain) for a brief overview. -::: - -To check the details of your stake pool, run the below CLI command with the `get-stake-pool` option by providing the `--owner-address` and `--url` fields. - -The below command is for an example owner address `e7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb`. - -:::tip -For testnet or devnet `--url` field values, see [Aptos Blockchain Networks](../../networks.md). 
-::: - -```bash -aptos node get-stake-pool \ - --owner-address e7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb \ - --profile mainnet-operator -``` - -Example output: - -```json -{ - "Result": [ - { - "state": "Active", - "pool_address": "25c3482850a188d8aa6edc5751846e1226a27863643f5ebc52be4f7d822264e3", - "operator_address": "3bec5a529b023449dfc86e9a6b5b51bf75cec4a62bf21c15bbbef08a75f7038f", - "voter_address": "3bec5a529b023449dfc86e9a6b5b51bf75cec4a62bf21c15bbbef08a75f7038f", - "pool_type": "StakingContract", - "total_stake": 100525929489123, - "commission_percentage": 10, - "commission_not_yet_unlocked": 15949746439, - "lockup_expiration_utc_time": "2022-10-07T07:12:55Z", - "consensus_public_key": "0xb3a7ac1491b0165f08f136c2b02739846b6610084984d5298c2983c4f8e5553284bffca2e3fe2b99167da82717501732", - "validator_network_addresses": [ - "/ip4/35.91.145.164/tcp/6180/noise-ik/0xeddf05470520af91b847f353dd804a04399e1213d130a4260e813527f2c49262/handshake/0" - ], - "fullnode_network_addresses": [], - "epoch_info": { - "epoch": 594, - "epoch_interval_secs": 3600, - "current_epoch_start_time": { - "unix_time": 1665087178789891, - "utc_time": "2022-10-06T20:12:58.789891Z" - }, - "next_epoch_start_time": { - "unix_time": 1665090778789891, - "utc_time": "2022-10-06T21:12:58.789891Z" - } - } - } - ] -} -``` - -### Description of output fields - -**state** -- "Active": Validator is already in the validator set and proposing. -- "Pending_active": Validator will be added to the validator set in the next epoch. **Do not try to join the validator set again before the arrival of next epoch, or else you will receive an error. ** - -**pool_address** -- Use this "pool_address" (not the operator address) in you `validator.yaml` file. If you mistakenly used the operator address, you will receive the message: "Validator not in validator set". - -**commission_percentage** -- This can be set only by the stake pool owner. 
Operator receives the "commission_percentage" of the generated staking rewards. If you request the commission (you can do so by running the command `aptos stake request-commission`), then at the end of the `lockup_expiration_utc_time` the commission part of the rewards will go to the operator address while the rest will stay in the stake pool and belong to the owner. Here "the commission part of the rewards" means the value of **commission_not_yet_unlocked**. - - For example, in a scenario with a lock up of one month, you call `aptos stake request-commission` every month. This will pay out the commission that was accrued during the previous month but only when unlocked at the end of the previous month. Regardless of how often you run `aptos stake request-commission` during the month, the commission is only paid out upon the completion of `lockup_expiration_utc_time`. - - :::tip Compounding - Note that if you do not request commission for multiple months, your commission will accrue more due to compounding of the **commission_percentage** during these months. - ::: - - -**commission_not_yet_unlocked** -- The amount of commission (amount of APT) that is not yet unlocked. It will be unlocked at the `lockup_expiration_utc_time`. This is the total commission amount available to the operator, i.e., the staking rewards **only** to the operator. This does not include the staking rewards to the owner. - -**lockup_expiration_utc_time** -- The date when the commission will unlock. However, this unlocked commission will not be auto-disbursed. It will only disburse when the command `aptos stake request-commission` is called again. - -**epoch_info** -- Use the [Epoch Converter](https://www.epochconverter.com/) to convert the `unix_time` into human readable time. - -## Requesting commission - -Either an owner or an operator can request commission. 
You can request commission at the end of a lockup period, i.e., at the end of **lockup_expiration_utc_time**, by running the `aptos stake request-commission` command. Make sure to provide the operator and the owner addresses. See an example command below: - -```bash -aptos stake request-commission \ - --operator-address 0x3bec5a529b023449dfc86e9a6b5b51bf75cec4a62bf21c15bbbef08a75f7038f \ - --owner-address 0xe7be097a90c18f6bdd53efe0e74bf34393cac2f0ae941523ea196a47b6859edb \ - --profile mainnet-operator -``` - -If you run the `aptos stake request-commission` command before the end of the lockup expiration, the command will initiate unlock for any locked commission earned up until that moment in time. - -See example below: - -Month 1 Day 29, you call the command, it would initiate unlock for 29 days worth of commission. - -Month 2, Day 29, if you call the command again, it would disburse the fully unlocked commission from previous month (29 days worth), and initiate commission unlock for Month 1 Day 30 + Month 2 Day 1-29 (30 days worth). - -Month 3, Day 29, if you call the commission again, 30 days of commission would be disbursed, and the a new batch of commission would initiate unlock. - -You can call the command multiple times, and the amount you receive depends on the day when you requested commission unlock previously. - - -Commission is unlocked when `request-commission` is called, the staker unlocks stake, or the staker switches operator. The commission will not be withdrawable until the end of the lockup period. Unlocked commission will continue to earn rewards until the lockup period expires. - - -## Checking your validator performance - -To see your validator performance in the current and past epochs and the rewards earned, run the below command. The output will show the validator's performance in block proposals, and in governance voting and governance proposals. Default values are used in the below command. 
Type `aptos node get-performance --help` to see default values used. - -```bash -aptos node get-performance \ - --pool-address \ - --profile mainnet-operator -``` - -Example output: - -```json -{ - "Result": { - "current_epoch_successful_proposals": 56, - "current_epoch_failed_proposals": 0, - "previous_epoch_rewards": [ - "12312716242", - "12272043711", - "12312912674", - "12313011054", - "12313109435", - "12180092056", - "12313305136", - "12313403519", - "12313501903", - "12313600288" - ], - "epoch_info": { - "epoch": 68, - "epoch_interval": 3600000000, - "last_epoch_start_time": { - "unix_time": 1665074662417326, - "utc_time": "2022-10-06T16:44:22.417326Z", - "local_time": "Thu Oct 6 16:44:22 2022" - }, - "next_epoch_start_time": { - "unix_time": 1665078262417326, - "utc_time": "2022-10-06T17:44:22.417326Z", - "local_time": "Thu Oct 6 17:44:22 2022" - } - } - } -} -``` - -#### Description of fields - -**current_epoch_successful_proposals** -- Successful leader-validator proposals during the current epoch. Also see [Validation on the Aptos blockchain](../../../concepts/staking.md#validation-on-the-aptos-blockchain) for the distinction between leader-validator and the voter-validator. - -**previous_epoch_rewards** -- An ordered list of rewards earned (APT amounts) for the previous 10 epochs, starting with the 10 epoch in the past. In the above example, a reward of 12312716242 APT was earned 10 epochs past and a reward of 12313600288 APT was earned in the most recent epoch. If a reward is 0 for any epoch, then: - - Either the validator was not part of the validator set in that epoch (could have been in either inactive or pending_active validator state), or - - The validator missed all the leader proposals. - -### Checking the performance for all epochs - -To check the performance of all the epochs since the genesis, run the below command. 
You can filter the results for your pool address with `grep`, as shown below: - -```bash -aptos node analyze-validator-performance \ - --analyze-mode detailed-epoch-table \ - --profile mainnet-operator \ - --start-epoch 0 | grep -``` - -## Tracking rewards - -`DistributeEvent` is emitted when there is a transfer from staking_contract to the operator or staker (owner). Rewards can be tracked either by listening to `DistributeEvent` or by using the [View function](../../../apis/fullnode-rest-api.md#reading-state-with-the-view-function) to call `staking_contract_amounts`. This will return `accumulated_rewards` and `commission_amount`. diff --git a/developer-docs-site/docs/nodes/validator-node/operator/update-validator-node.md b/developer-docs-site/docs/nodes/validator-node/operator/update-validator-node.md deleted file mode 100644 index 5f5ed86f309f5..0000000000000 --- a/developer-docs-site/docs/nodes/validator-node/operator/update-validator-node.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -title: "Update Aptos Validator Node" -slug: "update-validator-node" ---- - -# Update Aptos Validator Node via Failover - -You will likely have to upgrade or replace your validator node (VN) at some point, such as for maintenance or outages. Start anew by [creating a new validator fullnode (VFN)](running-validator-node/index.md). To minimize downtime, we recommend you then convert your live validator fullnode to your validator node, and backfill the validator fullnode. - -Since you are already running [a validator node and a validator fullnode](node-requirements.md), you have at your fingertips the means to replace your validator node immediately. Simply convert your validator fullnode to a validator node and then backfill the validator fullnode with either the updated validator node or an entirely new validator fullnode. - -This page explains how to make this swap, which largely amounts to switching out files and configuration settings between the two nodes. 
For a community-provided version of this document for Docker setup, see [Failover and migrate Validator Nodes for less downtime](https://forum.aptoslabs.com/t/failover-and-migrate-validator-nodes-for-less-downtime/144846). - -## Prepare - -First, understand the data is almost identical between the two nodes. The VFN is missing the `consensus_db` and `secure-data.json`, but it is otherwise largely ready for conversion into a validator node. - -To failover from an outdated or erroneous validator node to an updated and reliable validator fullnode, follow these steps: - -1. Ensure your machine meets the [validator hardware requirements](node-requirements.md#hardware-requirements). -1. Update your validator fullnode with the latest version of the [Aptos CLI](../../../tools/aptos-cli/install-cli/index.md) -1. Copy the configuration files between the two nodes. See the files in the [validator setup](running-validator-node/index.md) documentation you used for the full list. -1. Synchonize data on the validator fullnode: - * For mainnet, use [state synchronization](../../../guides/state-sync.md). - * For devnet or testnet, [bootstrap a new fullnode from snapshot](../../full-node/bootstrap-fullnode.md). - -## Configure - -Remember to take the normal measures to connect your node to the Aptos network and establish staking pool operations, such as removing the `secure-data.json` file and updating your `account_address` in the `validator-identity.yaml` and `validator-fullnode-identity.yaml` files to your **pool** address. - -See the sections and guides below for full details. - -### Connect to Aptos network - -After deploying your nodes, [connect to the Aptos Network](./connect-to-aptos-network.md). - -### Set up staking pool operations - -After connecting your nodes to the Aptos network, [establish staking pool operations](./staking-pool-operations.md). - -## Failover - -To replace the validator node: - -1. 
Update DNS to [swap the node network addresses on-chain](./staking-pool-operations.md#3-update-validator-network-addresses-on-chain). -1. Turn down the validator node and validator fullnode intended to replace the validator. -1. Restart the former validator fullnode with the validator node configuration. -1. Observe that before DNS changes take effect that only outbound connections will form. -1. Either reuse the former validator node or create anew to backfill the validator fullnode. -1. Start the validator fullnode. -1. Use [Node Health Checker](../../measure/node-health-checker.md) and follow [Node Liveness Criteria](node-liveness-criteria.md) to ensure the validator node is functioning properly. - -## Run multiple validator fullnodes - -You may want to have a VFN ready for failover or need access to REST APIs for building without any rate limits. Note you have the ability to run a [local multinode network](../../../guides/running-a-local-multi-node-network.md) that may be suitable. - -With caution, you may also run multiple fullnodes on the Aptos network. Note that it is not currently recommended to run multiple VFNs with the same [network identity](../../identity-and-configuration.md) and connect them to the validator using the `vfn` network, as this may cause issues with node metrics and telemetry. - -To run multiple fullnodes and connect them to your validator: - -1. Connect only one fullnode using the `vfn` network configuration in the validator configuration `.yaml` file. This will be your single VFN (as registered on-chain) that other Aptos nodes will connect to. -1. Connect the rest of your fullnodes to the validator using a `public` network configuration *and a different network identity* in the validator configuration `.yaml` file. These will be your additional VFNs that you can use for other purposes. 
Note that because the additional VFNs will not be registered on-chain, other nodes will not know their network addresses and will not be able to connect to them.
To vote on a proposal `aptos governance vote --proposal-id --pool-address --url --profile ` - -# Delegation Pool Voter - -If you staked to a [delegation pool](../../../concepts/delegated-staking.md), you can vote proportional to your stake amount in the delegation pool or delegate your votes to another voter address. - -### Steps Using Aptos CLI - -To participate as a voter, follow the below steps. - -1. Get your delegation pool address from the [Aptos Explorer page](https://explorer.aptoslabs.com/validators/delegation?network=mainnet). -2. To see the list of proposal `aptos governance list-proposals --url https://mainnet.aptoslabs.com` -3. To set up your voter profile run `aptos init` -4. To vote on a proposal `aptos move run --function-id 0x1::delegation_pool::vote --args address: u64: u64: bool:` - -To delegate your voting power, follow the below steps. - -1. Get your delegation pool address from the [Aptos Explorer page](https://explorer.aptoslabs.com/validators/delegation?network=mainnet). -2. To set up your voter profile run `aptos init` -3. 
To delegate voting power `aptos move run --function-id 0x1::delegation_pool::delegate_voting_power --args address: address:` diff --git a/developer-docs-site/docs/reference/_category_.json b/developer-docs-site/docs/reference/_category_.json deleted file mode 100644 index 1b337d5a5f38f..0000000000000 --- a/developer-docs-site/docs/reference/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Reference", - "position": 5 -} diff --git a/developer-docs-site/docs/reference/error-codes.md b/developer-docs-site/docs/reference/error-codes.md deleted file mode 100644 index f151924beaa9e..0000000000000 --- a/developer-docs-site/docs/reference/error-codes.md +++ /dev/null @@ -1,164 +0,0 @@ ---- -title: "Aptos Error Codes" -slug: "error-codes" ---- -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Error Codes in Aptos - -This page catalogs common errors encountered in the Aptos blockchain and explains how to resolve them wherever possible. As with all software, the code itself is the source of truth for error handling and will always contain entries not found here. Instead, this matrix aims to help you address those errors most typically found, misunderstood, or both. 
- -For the sources of these errors, see: - - * [vm_status.rs](https://github.com/move-language/move/blob/main/language/move-core/types/src/vm_status.rs) - * [error.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/move-stdlib/sources/error.move) - * [account.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/account.move) - * [coin.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move) - * [token.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token.move) - * [token_transfers.move](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token_transfers.move) - -Help us update this list by sending pull requests containing the errors you encounter. If you don't know how to resolve the error, as described int the *Action* column, simply leave it blank. - -## Move Virtual Machine (VM) - -| Error | Meaning | Possible Resolution | -| --- | :---: | --- | -|LOOKUP_FAILED| A function that is being called isn't present on the network being used | Check that your dependencies onchain have the same version | -|UNKNOWN_VALIDATION_STATUS|We don't want the default value to be valid.| N/A | -|INVALID_SIGNATURE|The transaction has a bad signature.| Submit a new transaction with a new signature | -|INVALID_AUTH_KEY|Bad account authentication key.| Submit a new transaction with a new signature, check that the account matches the authentication key and hasn't been rotated | -|SEQUENCE_NUMBER_TOO_OLD|Sequence number is too old.| Submit a new transaction with a newer sequence number from the account | -|SEQUENCE_NUMBER_TOO_NEW|Sequence number is too new.| Submit a new transaction with a new signature | -|INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE|Insufficient balance to pay minimum transaction fee.| Fund the account with more APT to pay for the gas fee | 
-|TRANSACTION_EXPIRED|The transaction has expired.| Submit a new transaction with an expiration time further in the future | -|SENDING_ACCOUNT_DOES_NOT_EXIST|The sending account does not exist.| Create the account prior to sending the transaction again | -|REJECTED_WRITE_SET|This write set transaction was rejected because it did not meet the requirements for one.| N/A | -|INVALID_WRITE_SET|This write set transaction cannot be applied to the current state.| N/A | -|EXCEEDED_MAX_TRANSACTION_SIZE|Length of program field in raw transaction exceeded max length.| The transaction is too large for a single transaction; if this is a package publish, try to break it into multiple packages | -|UNKNOWN_SCRIPT|This script is not in our allowlist of scripts.| N/A | -|UNKNOWN_MODULE|Transaction is trying to publish a new module.| N/A | -|MAX_GAS_UNITS_EXCEEDS_MAX_GAS_UNITS_BOUND|Max gas units submitted with transaction exceeds max gas units bound in VM.| Decrease the max gas amount in the transaction below the maximum value in the gas schedule | -|MAX_GAS_UNITS_BELOW_MIN_TRANSACTION_GAS_UNITS|Max gas units submitted with transaction not enough to cover the intrinsic cost of the transaction.| Increase the max gas amount above the minimum value in the gas schedule | -|GAS_UNIT_PRICE_BELOW_MIN_BOUND|Gas unit price submitted with transaction is below minimum gas price set in the VM.| Increase the gas unit price below the minimum gas unit price in the gas schedule | -|GAS_UNIT_PRICE_ABOVE_MAX_BOUND|Gas unit price submitted with the transaction is above the maximum gas price set in the VM.| Decrease the gas unit price below the maximum gas unit price in the gas schedule | -|INVALID_GAS_SPECIFIER|Gas specifier submitted is either malformed (not a valid identifier), or does not refer to an accepted gas specifier.| N/A | -|SENDING_ACCOUNT_FROZEN|The sending account is frozen.| N/A | -|UNABLE_TO_DESERIALIZE_ACCOUNT|Unable to deserialize the account blob.| N/A | 
-|CURRENCY_INFO_DOES_NOT_EXIST|The currency info was unable to be found.| N/A | -|INVALID_MODULE_PUBLISHER|The account sender doesn't have permissions to publish modules.| N/A | -|NO_ACCOUNT_ROLE|The sending account has no role.| N/A | -|BAD_CHAIN_ID|The transaction's chain_id does not match the one published on-chain.| Verify that your chain ID matches the chain ID for your network | -|SEQUENCE_NUMBER_TOO_BIG|The sequence number is too large and would overflow if the transaction were executed.| N/A | -|BAD_TRANSACTION_FEE_CURRENCY|The gas currency is not registered as a TransactionFee currency.| N/A | -|FEATURE_UNDER_GATING|The feature requested is intended for a future Aptos version instead of the current one.| N/A | -|SECONDARY_KEYS_ADDRESSES_COUNT_MISMATCH|The number of secondary signer addresses is different from the number of secondary public keys provided.| Verify the multi-agent or multi-ed25519 secondary signer addresses match the secondary public keys | -|SIGNERS_CONTAIN_DUPLICATES|There are duplicates among signers, including the sender and all the secondary signers.| Remove any duplicate signers | -|SEQUENCE_NONCE_INVALID|The sequence nonce in the transaction is invalid (too new, too old, or already used).| N/A | -|CHAIN_ACCOUNT_INFO_DOES_NOT_EXIST|There was an error when accessing chain-specific account information.| N/A | -|MODULE_ADDRESS_DOES_NOT_MATCH_SENDER|the module publisher is not the account that will eventually hold the module.| Confirm the module address in the move contract matches the sender of the transaction | -|ZERO_SIZED_STRUCT|Reported when a struct has zero fields.| N/A | -|DUPLICATE_MODULE_NAME|The sender is trying to publish two modules with the same name in one transaction.| Confirm every module has a unique name | -|BACKWARD_INCOMPATIBLE_MODULE_UPDATE|The sender is trying to publish a module that breaks the compatibility checks.| Confirm your new modules being published don't break backwards compatibility | 
-|CYCLIC_MODULE_DEPENDENCY|The updated module introduces a cyclic dependency (i.e., A uses B and B also uses A).| Check for loops in your module dependencies in the modules being published | -|INVALID_FRIEND_DECL_WITH_SELF|Cannot mark the module itself as a friend.| Confirm no module has itself marked as a friend in the modules being published | -|INVALID_FRIEND_DECL_WITH_MODULES_OUTSIDE_ACCOUNT_ADDRESS|Cannot declare modules outside of account address as friends.| Confirm all friends are in the same account address in the modules being published | -|INVALID_FRIEND_DECL_WITH_MODULES_IN_DEPENDENCIES|Cannot declare modules that this module depends on as friends.| Check friend declarations of the modules being published | -|CYCLIC_MODULE_FRIENDSHIP|The updated module introduces a cyclic friendship (i.e., A friends B and B also friends A).| Check friend declarations of the modules being published | -|INVALID_PHANTOM_TYPE_PARAM_POSITION|A phantom type parameter was used in a non-phantom position.| Confirm phantom types are used only with generics | -|LOOP_MAX_DEPTH_REACHED|Loops are too deeply nested.| Check for many nested loops | -|TYPE_RESOLUTION_FAILURE|Failed to resolve type due to linking being broken after verification.| N/A | -|RESOURCE_DOES_NOT_EXIST|We tried to access a resource that does not exist under the account.| Check the contract and possibly change it to handle resources that don't exist | -|RESOURCE_ALREADY_EXISTS|We tried to create a resource under an account where that resource already exists.| Check the contract and possibly change it to handle resources that already exist | -|UNKNOWN_STATUS|A reserved status to represent an unknown vm status. This is std::u64::MAX, but we can't pattern match on that, so put the hardcoded value in.| N/A | -|LINKER_ERROR|This may be due to the function has not been published on chain or by trying to call an invalid function as the result of either an incorrect account address, module name, or function name. 
This might not happen locally if the sources are available locally but have yet to be published on-chain.| There are many reasons, but you should check your account addresses, module names, and function names to determine that they're correct and published | - -## Move Standard Library (stdlib) - -| Error | Meaning | -| --- | :---: | -|INVALID_ARGUMENT|Caller specified an invalid argument (http: 400).| -|OUT_OF_RANGE|An input or result of a computation is out of range (http: 400).| -|INVALID_STATE|The system is not in a state where the operation can be performed (http: 400).| -|UNAUTHENTICATED|Request not authenticated due to missing, invalid, or expired auth token (http: 401).| -|PERMISSION_DENIED|The client does not have sufficient permission (http: 403).| -|NOT_FOUND|A specified resource is not found (http: 404).| -|ABORTED|Concurrency conflict, such as read-modify-write conflict (http: 409).| -|ALREADY_EXISTS|The resource that a client tried to create already exists (http: 409).| -|RESOURCE_EXHAUSTED|Out of gas or other forms of quota (http: 429).| -|CANCELLED|Request cancelled by the client (http: 499).| -|INTERNAL|Internal error (http: 500).| -|NOT_IMPLEMENTED|Feature not implemented (http: 501).| -|UNAVAILABLE|The service is currently unavailable. 
Indicates that a retry could solve the issue (http: 503).| - -## Aptos accounts - -| Error | Meaning | Possible Resolution | -| --- | :---: | --- | -|EACCOUNT_ALREADY_EXISTS|Account already exists.| N/A | -|EACCOUNT_DOES_NOT_EXIST|Account does not exist.| Create the account first | -|ESEQUENCE_NUMBER_TOO_BIG|Sequence number exceeds the maximum value for a u64.| Provide a smaller sequence number | -|EMALFORMED_AUTHENTICATION_KEY|The provided authentication key has an invalid length.| Check your authentication key; it should be a 32-byte vector | -|ECANNOT_RESERVED_ADDRESS|Cannot create account because address is reserved.| N/A | -|EOUT_OF_GAS|Transaction exceeded its allocated max gas.| Increase the max gas amount | -|EWRONG_CURRENT_PUBLIC_KEY|Specified current public key is not correct.| Confirm the public key matches the account | -|EINVALID_PROOF_OF_KNOWLEDGE|Specified proof of knowledge required to prove ownership of a public key is invalid.| Check your proof of knowledge in key rotation to ensure it has proper signatures | -|ENO_CAPABILITY|The caller does not have a digital-signature-based capability to call this function.| Confirm you have the capability for the called functions | -|EINVALID_ACCEPT_ROTATION_CAPABILITY|The caller does not have a valid rotation capability offer from the other account.| Confirm the account being rotated is correct | -|ENO_VALID_FRAMEWORK_RESERVED_ADDRESS|Address to create is not a valid reserved address for Aptos framework.| N/A | -|EINVALID_SCHEME|Specified scheme required to proceed with the smart contract operation - can only be ED25519_SCHEME(0) OR MULTI_ED25519_SCHEME(1).| Confirm the transaction was signed correctly when creating the account | -|EINVALID_ORIGINATING_ADDRESS|Abort the transaction if the expected originating address is different from the originating address on-chain.| Confirm you are rotating the correct account's key | -|ENO_SUCH_SIGNER_CAPABILITY|The signer capability doesn't exist at the given address.| 
Confirm the address is correct | - -## Aptos coins - -| Error | Meaning | Possible Resolution | -| --- | :---: | --- | -|ECOIN_INFO_ADDRESS_MISMATCH|Address of account which is used to initialize a coin `CoinType` doesn't match the deployer of module.| Create the coin using a `CoinType` in the same account creating the coin. | -|ECOIN_INFO_ALREADY_PUBLISHED|`CoinType` is already initialized as a coin.| N/A | -|ECOIN_INFO_NOT_PUBLISHED|`CoinType` hasn't been initialized as a coin.| Create the coin with `CoinType` first before using it | -|ECOIN_STORE_ALREADY_PUBLISHED|Account already has `CoinStore` registered for `CoinType`.| N/A | -|ECOIN_STORE_NOT_PUBLISHED|Account hasn't registered `CoinStore` for `CoinType`.| Register the account for the `CoinType` | -|EINSUFFICIENT_BALANCE|Not enough coins to complete transaction.| Transfer less coins, or acquire more coins prior to the transfer | -|EDESTRUCTION_OF_NONZERO_TOKEN|Cannot destroy non-zero coins.| N/A | -|EZERO_COIN_AMOUNT|Coin amount cannot be zero.| Don't burn coins or conduct other actions with zero coins | -|EFROZEN|CoinStore is frozen. 
Coins cannot be deposited or withdrawn.| Account is frozen for this token; talk to the coin owner | -|ECOIN_SUPPLY_UPGRADE_NOT_SUPPORTED|Cannot upgrade the total supply of coins to different implementation.| N/A | -|ECOIN_NAME_TOO_LONG|Name of the coin is too long.| Coin name must be less than or equal to 32 characters | -|ECOIN_SYMBOL_TOO_LONG|Symbol of the coin is too long.| Coin symbol must be less than or equal to 10 characters | - -## Aptos tokens - -| Error | Meaning | -| --- | :---: | -|EALREADY_HAS_BALANCE|The token has balance and cannot be initialized.| -|ECOLLECTIONS_NOT_PUBLISHED|There isn't any collection under this account.| -|ECOLLECTION_NOT_PUBLISHED|Cannot find collection in creator's account.| -|ECOLLECTION_ALREADY_EXISTS|The collection already exists.| -|ECREATE_WOULD_EXCEED_COLLECTION_MAXIMUM|Exceeds the collection's maximal number of token_data.| -|EINSUFFICIENT_BALANCE|Insufficient token balance.| -|EINVALID_TOKEN_MERGE|Cannot merge the two tokens with different token IDs.| -|EMINT_WOULD_EXCEED_TOKEN_MAXIMUM|Exceed the token data maximal allowed.| -|ENO_BURN_CAPABILITY|No burn capability.| -|ETOKEN_DATA_ALREADY_EXISTS|TokenData already exists.| -|ETOKEN_DATA_NOT_PUBLISHED|TokenData not published.| -|ETOKEN_STORE_NOT_PUBLISHED|TokenStore doesn't exist.| -|ETOKEN_SPLIT_AMOUNT_LARGER_THAN_TOKEN_AMOUNT|Cannot split token to an amount larger than its amount.| -|EFIELD_NOT_MUTABLE|The field is not mutable.| -|ENO_MUTATE_CAPABILITY|Not authorized to mutate.| -|ENO_TOKEN_IN_TOKEN_STORE|Token not in the token store.| -|EUSER_NOT_OPT_IN_DIRECT_TRANSFER|User didn't opt-in direct transfer.| -|EWITHDRAW_ZERO|Cannot withdraw 0 token.| -|ENFT_NOT_SPLITABLE|Cannot split a token that only has 1 amount.| -|ENO_MINT_CAPABILITY|No mint capability| -|ECOLLECTION_NAME_TOO_LONG|The collection name is too long.| -|ENFT_NAME_TOO_LONG|The NFT name is too long.| -|EURI_TOO_LONG|The URI is too long.| -|ENO_DEPOSIT_TOKEN_WITH_ZERO_AMOUNT|Cannot deposit a token with 0 
amount.| -|ENO_BURN_TOKEN_WITH_ZERO_AMOUNT|Cannot burn 0 token.| -|EWITHDRAW_PROOF_EXPIRES|Withdraw proof expires.| -|EOWNER_CANNOT_BURN_TOKEN|Token is not burnable by owner.| -|ECREATOR_CANNOT_BURN_TOKEN|Token is not burnable by creator.| -|ECANNOT_UPDATE_RESERVED_PROPERTY|Reserved fields for token contract. Cannot be updated by user.| -|EURI_TOO_SHORT|URI too short.| -|ETOKEN_OFFER_NOT_EXIST|Token offer doesn't exist.| diff --git a/developer-docs-site/docs/reference/glossary.md b/developer-docs-site/docs/reference/glossary.md deleted file mode 100755 index 4749389b6aace..0000000000000 --- a/developer-docs-site/docs/reference/glossary.md +++ /dev/null @@ -1,466 +0,0 @@ ---- -title: "Aptos Glossary" -slug: "glossary" ---- - -# Aptos Glossary - -## A - -### Accumulator Root Hash - -- An **accumulator root hash** is the root hash of a [Merkle accumulator.](https://eprint.iacr.org/2009/625.pdf) - -### Account - -- An **account** in the Aptos blockchain is a container for an arbitrary number of [Move modules](#move-module) and [Move resources](#move-resources). This essentially means that the state of each [account](../concepts/accounts.md) is comprised of both code and data. -- The account is identified by [account address](#account-address). - -See [Accounts](../concepts/accounts.md) for more information. - -### Account Address - -- An **account address** is the address of an Aptos account. -- Account address refers to a specific destination on the Aptos network. The address dictates the destination and source of a specific amount of assets exchanged by two parties on the blockchain. -- An Aptos address is a 64-character hex string, and sometimes it can be shortened by stripping leading 0s and prefixing `0x`. This makes a hex-encoded 32 byte Aptos account address. - -See [Accounts](../concepts/accounts.md) for more information. 
- -### API - -- An **Application Programming Interface (API)(** is a set of protocols and tools that allow users to interact with Aptos blockchain nodes and client networks via external applications. Aptos offers a REST API for this purpose. See the [Aptos API reference](https://aptos.dev/nodes/aptos-api-spec#/) documentation and [Use the Aptos API](../apis/fullnode-rest-api.md) for more details. - -### APT - -**Aptos token (APT)** is the Aptos blockchain native token used for paying network and transaction fees. - -### Aptos - -**Aptos** is a Layer 1 blockchain for everyone. It uses the Move programming language and launched its mainnet on 2022-10-17 to redefine the web3 user experience. The Aptos blockchain is dedicated to creating better user experiences through increased speed, security, scalability, reliability and usability with low transaction costs. The word “Aptos" means "The People" in the Ohlone language. See the [Aptos White Paper](../aptos-white-paper/index.md) for more details. - -### AptosBFT - -- **AptosBFT** is the Aptos protocol's BFT consensus algorithm. -- AptosBFT is based on Jolteon. - -### Aptos Blockchain - -- The **Aptos blockchain** is a ledger of immutable transactions agreed upon by the validators on the Aptos network (the network of validators). - -### Aptos Name Service (ANS) - -- The **Aptos Name Service (ANS)** is a decentralized naming address service for the Aptos blockchain. An Aptos name is a human-readable *.apt* domain name that is used in place of a public key, for example *love.apt*. -- This service also allows users to register subdomain names in addition to the registered domain. Find out more at: [Aptosnames.com](https://www.aptosnames.com/) - -### Aptos-core - -**Aptos-core** is the open source technology on which the Aptos Payment Network runs. 
Aptos-core contains software for - -* the Aptos blockchain itself, which generates and stores the immutable ledger of confirmed transactions and -* the validation process, which implements the consensus algorithm to validate transactions and add them to the Aptos blockchain immutable ledger. - -### Aptos Ecosystem - -- **Aptos ecosystem** refers to various components of the Aptos blockchain network and their interactions. The Aptos ecosystem includes the community, community-driven projects, and events. See [Contribute to the Aptos Ecosystem](../community/index.md) for all possible ways to join Aptos. - -### Aptos Explorer - -- The **[Aptos Explorer](https://explorer.aptoslabs.com/)** is an interface that helps users examine details of the Aptos blockchain, including account information, validators, and transactions. -- The Aptos Explorer help users validate their work in Aptos wallets and other tools in the blockchain. Find more details at [Use the Aptos Explorer](../guides/explore-aptos.md). - -### Aptos Framework -The **Aptos Framework** defines the public API for blockchain updates and the structure of on-chain data. It defines the business logic and access control for the three key pillars of Aptos functionality: payments, treasury, and on-chain governance. It is implemented as a set of modules written in the Move programming language and stored on-chain as Move bytecode. - -### Aptos Node -An **Aptos node** is a peer entity of the Aptos network that tracks the state of the Aptos blockchain. There are two types of Aptos nodes, [validators](#validator) and [fullnodes](#fullnode)). - -### Aptos Protocol - -- **Aptos protocol** is the specification of how transactions are submitted, ordered, executed, and recorded within the Aptos network. - -### AptosAccount - -- A **`AptosAccount`** is a Move resource that holds all the administrative data associated with an account, such as sequence number, balance, and authentication key. 
-- A **`AptosAccount`** is the only resource that every account is guaranteed to contain. - -### AptosAccount module - -- **The AptosAccount module** is a Move module that contains the code for manipulating the administrative data held in a particular `AptosAccount.T` resource. -- Code for checking or incrementing sequence numbers, withdrawing or depositing currency, and extracting gas deposits is included in the AptosAccount module. - -### Aptos devnet - -- See [devnet](#devnet). - -## B - -### Byzantine (Validator) - -- A **validator** that does not follow the specification of the consensus protocol, and wishes to compromise the correct execution of the protocol. -- BFT algorithms traditionally support up to one-third of the algorithm's voting power being held by Byzantine validators. - -### Byzantine Fault Tolerance (BFT) - -- **Byzantine Fault Tolerance** (BFT) is the ability of a distributed system to provide safety and liveness guarantees in the presence of faulty, or “[Byzantine](#byzantine-validator),” validators below a certain threshold. -- The Aptos blockchain uses AptosBFT, a consensus protocol based on [Jolteon](#Jolteon). -- BFT algorithms typically operate with a number of entities, collectively holding N votes (which are called “validators” in the Aptos network’s application of the system). -- N is chosen to withstand some number of validators holding f votes, which might be malicious. -- In this configuration, N is typically set to 3f+1. Validators holding up to f votes will be allowed to be faulty — offline, malicious, slow, etc. As long as 2f+1 votes are held by [honest](#honest-validator) validators, they will be able to reach consensus on consistent decisions. -- This implies that BFT consensus protocols can function correctly, even if up to one-third of the voting power is held by validators that are compromised or fail. 
- -## C - -### CLI - -- **Command line interface** refers to the Aptos CLI used for developing on the Aptos blockchain, operating nodes, and debugging issues. Find out more at [Use the Aptos CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md). - -### Client - -- **Client** is software that receives information from the blockchain and manages transactions. Clients interact with the blockchain through the Aptos nodes. - -### Code labs - -- **Code labs and tutorials** depict various workflows - such as the use of the Aptos CLI in minting non-fungible tokens (NFTs) - in order for users to understand how the process works and employ related functions in their code. If users have the necessary funds in their accounts, they can follow the same code lab and tutorial steps used in devnet, testnet and mainnet networks. - -### Consensus - -- **Consensus** is a component of a validator. -- The consensus component is responsible for coordination and agreement amongst all validators on the block of transactions to be executed, their order, and the execution results. -- The Aptos blockchain is formed with these agreed-upon transactions and their corresponding execution results. -- The consensus component is accountable for achieving security, trust, and agreement among all validators on the Aptos blockchain. - -### Consensus Protocol - -- A **consensus protocol** is collectively executed by n validators to accept or reject a transaction and to agree on the ordering of transactions and [execution results](#execution-result). -- See [BFT](#byzantine-fault-tolerance-bft). - -## D - -### dApps - -- **Decentralized applications (dApps)** are programs or digital applications that run on the Aptos blockchain autonomously. Smart contracts are commonly used to achieve this function. - -### devnet - -- The **Aptos devnet** is a publicly deployed instance of the Aptos network that runs using a set of validator test nodes. 
The devnet is a demonstration of the Aptos network that is built for experimenting with new ideas.
APT on these networks has no real world value, it is only for development purposes. -- You can use the faucet in a few different ways: - - With the [Aptos CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md#fund-an-account-with-the-faucet). - - Through a wallet, such as Petra, Martian, or Pontem. You can find a full list [here](https://github.com/aptos-foundation/ecosystem-projects#wallets). - - Using an SDK, for example by using the `FaucetClient` in the TypeScript SDK. - - With a direct HTTP request. Learn how to do this [here](guides/system-integrators-guide.md#calling-the-faucet-other-languages). - -### Fullnodes - -- **Fullnodes** are clients that ensure data are stored up-to-date on the network. They replicate blockchain state and transactions from other fullnodes and validator nodes. - -### Fungible Asset - -- A **fungible asset** is an asset, such as a currency, share, in-game resource, etc., that is interchangeable with another identical asset without any loss in its value. For example, APT is a fungible asset because you can exchange one APT for another. -- Follow the [Asset Standards](../standards/index.md#asset-standards) to create fungible assets on the Aptos blockchain. -- Next generation of the Coin standard that addresses shortcomings of `aptos_framework::coin` such as lack of guaranteed enforcement of freeze and burn and advanced functionalities such as programmable transfers, e.g., approve in ERC-20. - -### Fungible Token - -- For the legacy Aptos Token Standard (aptos_token::token), a **fungible token** is a token that is interchangeable with other identical tokens (i.e., tokens that share the same `TokenId`). This means the tokens have the same `creator address`, `collection name`, `token name`, and `property version`. -- For the Aptos Digital Asset Standard (aptos_token_objects::token), a **fungible token** is a fungible asset with metadata object that includes a Digital Asset resource. 
- -### Fungible Unit - -- A **fungible unit** is an individual unit of a fungible asset. These units are identical and interchangeable without any loss in value. For example, each Octa (the smallest unit of APT) is a fungible unit. - -## G - -### Gas - -- **Gas** is a way to pay for computation and storage on a blockchain network. All transactions on the Aptos network cost a certain amount of gas. -- The gas required for a transaction depends on the size of the transaction, the computational cost of executing the transaction, and the amount of additional global state created by the transaction (e.g., if new accounts are created). -- The purpose of gas is regulating demand for the limited computational and storage resources of the validators, including preventing denial of service (DoS) attacks. - -### Gas Unit Price - -- Each transaction specifies the **gas unit price** the sender is willing to pay per unit of gas. -- The price of gas required for a transaction depends on the current demand for usage of the network. - -## H - -### Honest (Validator) - -- **Honesty** means a validator that faithfully executes the consensus protocol and is not Byzantine. - -### Jolteon - -- **Jolteon** is a recent proposal for a [BFT](#byzantine-fault-tolerance-bft) consensus protocol. -- AptosBFT, the Aptos network's consensus algorithm, is based on Jolteon. -- It simplifies the reasoning about safety, and it addresses some performance limitations of previous consensus protocols. In particular, it reduces latency by 33% compared to HotStuff. - -## I - -### Indexer - -- **[Indexer](../indexer/indexer-landing.md)** is the component of Aptos that retrieves, processes, and efficiently stores raw data in the database to provide speedy access to the Aptos blockchain state. - -## L - -### Leader - -- A **leader** is a validator that proposes a block of transactions for the consensus protocol. -- In leader-based protocols, nodes must agree on a leader to make progress. 
-- Leaders are selected by a function that takes the current [round number](https://fb.quip.com/LkbMAEBIVNbh#ffYACAO6CzD) as input. - -## M - -### Mainnet - -- **Mainnet** refers to a working, fully-operational blockchain. A mainnet network has been fully deployed and performs the functionality of transferring digital currency from a sender to a recipient. - -### Maximum Gas Amount - -- The **Maximum Gas Amount** of a transaction is the maximum amount of gas the sender is ready to pay for the transaction. -- The gas charged is equal to the gas price multiplied by units of gas required to process this transaction. If the result is less than the max gas amount, the transaction has been successfully executed. -- If the transaction runs out of gas while it is being executed or the account runs out of balance during execution, then the sender will be charged for gas used and the transaction will fail. - -### Mempool - -- **Mempool** is one of the components of the validator. It holds an in-memory buffer of transactions that have been submitted but not yet agreed upon and executed. Mempool receives transactions from [JSON-RPC Service](#json-rpc-service). -- Transactions in the mempool of a validator are added from the JSON-RPC Service of the current node and from the mempool of other Aptos nodes. -- When the current validator is the leader, its consensus component pulls the transactions from its mempool and proposes the order of the transactions that form a block. The validator quorum then votes on the proposal. - -### Merkle Trees - -- **Merkle tree** is a type of authenticated data structure that allows for efficient verification of data integrity and updates. -- The Aptos network treats the entire blockchain as a single data structure that records the history of transactions and states over time. -- The [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) implementation simplifies the work of apps accessing the blockchain. 
It allows apps to: - - Read any data from any point in time. - - Verify the integrity of the data using a unified framework. - -### Merkle Accumulator - -- The **[Merkle Accumulator](https://www.usenix.org/legacy/event/sec09/tech/full_papers/crosby.pdf)** is an _append-only_ Merkle tree that the Aptos blockchain uses to store the ledger. -- Merkle accumulators can provide proofs that a transaction was included in the chain (“proof of inclusion”). -- They are also called "history trees" in literature. - -### Module - -- A **module** in the Move programming language may either be a program or library that can create, transfer, or store assets. - -### Move - -- **Move** is a new programming language that implements all the transactions on the Aptos blockchain. -- It has two different kinds of code — [transaction scripts](#transaction-script) and [Move modules](#move-module). -- Move is a safe and secure programming language for web3 that emphasizes access control and scarcity. It is the programming language used to build the Aptos blockchain. You can read more about it in [Move on Aptos](../move/move-on-aptos.md). - -### Move Bytecode - -- Move programs are compiled into **Move bytecode**. -- Move bytecode is used to express transaction scripts and Move modules. - -### Move Module - -- A **Move module** defines the rules for updating the global state of the Aptos blockchain. -- In the Aptos protocol, a Move module is a **smart contract**. -- Each user-submitted transaction includes a transaction script. The transaction script invokes procedures of one or more Move modules to update the global state of the blockchain according to the rules. - -### Move Resources - -- **Move resources** contain data that can be accessed according to the **procedures** declared in a Move **module.** -- Move resources can never be copied, reused, or lost. This protects Move programmers from accidentally or intentionally losing track of a resource. 
- -### Move Virtual Machine (MVM) - -- The **Move virtual machine** executes transaction scripts written in [Move bytecode](#move-bytecode) to produce an [execution result](#execution-result). This result is used to update the blockchain **state**. -- The virtual machine is part of a [validator](#validator). -- The Move virtual machine (MoveVM) processes each validator node that translates transactions along with the current blockchain ledger state to produce a changeset as input or storage delta as output. - -## N - -### Node - -- A **node** is a peer entity of the Aptos network that tracks the state of the Aptos blockchain. -- An Aptos node consists of logical components. [Mempool](#mempool), [consensus](#consensus), and the [virtual machine](#virtual-machine) are examples of node components. - -### Nonce - -- **Nonce** is a number only used once, a random or semi-random number that is generated for a specific use for authentication protocols and cryptographic hash functions. - -## O - -### Open-Source Community - -- **Open-source community** is a term used for a group of developers who work on open-source software. If you're reading this glossary, then you are part of the Aptos project's developer community. - -## P - -### Proof - -- A **proof** is a way to verify the accuracy of data in the blockchain. -- Every operation in the Aptos blockchain can be verified cryptographically that it is indeed correct and that data has not been omitted. -- For example, if a user queries the information within a particular executed transaction, they will be provided with a cryptographic proof that the data returned to them is correct. - -### PoS - -**Proof-of-Stake (PoS)** is a security mechanism that serves in confirming the uniqueness and legitimacy of blockchain transactions. The PoS consensus mechanism is leveraged by the Aptos blockchain powered by a network of validators, which in turn update the system and process transactions. 
- -## R - -### Resource Account - -- A **resource account** is used to manage resources independent of an account managed by a user. For example, a developer may use a resource account to manage an account for module publishing, say managing a contract. - -- The contract itself does not require a signer post initialization. A resource account gives you the means for the module to provide a signer to other modules and sign transactions on behalf of the module. - -See [Resource accounts](../move/move-on-aptos/resource-accounts.md) for instructions on use. - -### REST Service - -- The **REST Service** component is the external interface of an Aptos node. Any incoming client request, such as submitted transactions or queries, must first go through the REST Service. A client needs to go through the REST Service component to access storage or any other component in the system. This filters requests and protects the system. -- Whenever a client submits a new transaction, the REST Service passes it to [mempool](#mempool). - -### Round - -- A **round** consists of achieving consensus on a block of transactions and their execution results. - -### Round Number - -- A **round number** is a shared counter used to select leaders during an [epoch](#epoch) of the consensus protocol. - -## S - -### SDKs - -- Aptos **software development kits (SDKs)** are sets of tools that enable a developer to quickly create a custom app on the Aptos platform. Find out more at [Use the Aptos SDKs](../sdks/index.md). - -### Sequence Number - -- The **sequence number** for an account indicates the number of transactions that have been submitted and committed on chain from that account. It is incremented every time a transaction sent from that account is executed or aborted and stored in the blockchain. -- A transaction is executed only if it matches the current sequence number for the sender account. This helps sequence multiple transactions from the same sender and prevents replay attacks. 
-- If the current sequence number of an account A is X, then a transaction T on account A will only be executed if T's sequence number is X. -- These transactions will be held in mempool until they are the next sequence number for that account (or until they expire). -- When the transaction is applied, the sequence number of the account will become X+1. The account has a strictly increasing sequence number. - -### Sender - -- _Alternate name_: Sender address. -- **Sender** is the address of the originator account for a transaction. A transaction must be signed by the originator. - -### Smart Contract - -- **Smart contract** refers to a computer program that automatically and directly carries out the contract's terms. -- See [Move Module](#move-module) for related details. - -### State - -- A **state** in the Aptos protocol is a snapshot of the distributed database. -- A transaction modifies the database and produces a new and updated state. - -### State Root Hash - -- **State root hash** is a [Merkle hash](https://en.wikipedia.org/wiki/Merkle_tree) over all keys and values the state of the Aptos blockchain at a given version. - -## T - -### Table - -- A [**table**](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/doc/table.md) implements the Table type and in Aptos is used to store information as key-value data within an account at large scale. - -See [`table.move`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-stdlib/sources/table.move) for the associated Aptos source file. - -### Testnet - -- **Testnet** describes the Aptos network that is not fully functional yet more stable than devnet; it is an alternative network to mainnet to be used for testing. - -### Tokens - -- **Tokens** are digital units of value issued on a blockchain. They can be redeemed for assets or value held. Tokens can be of the types: Fungible Token (FT), Non-Fungible Token (NFT), and Semi-Fungible Token (SFT). 
- -### Transaction - -- A raw **transaction** contains the following fields: - - [Sender (account address)](#account-address) - - [Transaction script](#transaction-script) - - [Gas price](#gas-price) - - [Maximum gas amount](#maximum-gas-amount) - - [Sequence number](#sequence-number) - - [Expiration time](#expiration-time) -- A signed transaction is a raw transaction with the digital signature. -- An executed transaction changes the state of the Aptos blockchain. - -### Transaction (or Move) Script - -- Each transaction submitted by a user includes a **transaction script**. -- These transactions, also know as Move scripts, represent the operations a client submits to a validator. -- The operation could be a request to move coins from user A to user B, or it could involve interactions with published [Move modules](#move-module) (smart contracts). -- The transaction script is an arbitrary program that interacts with resources published in the global storage of the Aptos blockchain by calling the procedures of a module. It encodes the logic for a transaction. -- A single transaction script can send funds to multiple recipients and invoke procedures from several different modules. -- A transaction script **is not** stored in the global state and cannot be invoked by other transaction scripts. It is a single-use program. - -To see example uses of transaction scripts, follow [Move scripts](../move/move-on-aptos/move-scripts.md) and the [Your First Multisig](../tutorials/first-multisig.md) tutorial. - -## V - -### Validator - -- _Alternate name_: Validators. -- A **validator** is an entity of the Aptos ecosystem that validates on the Aptos blockchain. It receives requests from clients and runs consensus, execution, and storage. -- A validator maintains the history of all the transactions on the blockchain. -- Internally, a validator needs to keep the current state, to execute transactions, and to calculate the next state. 
-- Aptos validators are in charge of verifying transactions. - -### Validator Nodes - -- **Validator nodes** are a unique class of fullnodes that take part in consensus, specifically a Byzantine Fault Tolerance (BFT) consensus protocol in Aptos. Validators agree upon transactions to be added to the Aptos blockchain as well as the order in which they are added. - -### Version - -- A **version** is also called “height” in blockchain literature. -- The Aptos blockchain doesn't have an explicit notion of a block — it only uses blocks for batching and executing transactions. -- A transaction at height 0 is the first transaction (genesis transaction), and a transaction at height 100 is the 101st transaction in the transaction store. - -## W - -### Well-Formed Transaction - -An Aptos transaction is **well formed** if each of the following conditions are true for the transaction: - -- The transaction has a valid signature. -- An account exists at the sender address. -- It includes a public key, and the hash of the public key matches the sender account's authentication key. -- The sequence number of the transaction matches the sender account's sequence number. -- The sender account's balance is greater than the [maximum gas amount](#maximum-gas-amount). -- The expiration time of the transaction has not passed. 
diff --git a/developer-docs-site/docs/reference/move.md b/developer-docs-site/docs/reference/move.md deleted file mode 100644 index b5f470292a77e..0000000000000 --- a/developer-docs-site/docs/reference/move.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -title: "Move Reference Documentation" -id: "move" ---- -import Link from "@site/src/components/Link"; -import MoveReference from "@site/src/components/MoveReference"; - - diff --git a/developer-docs-site/docs/reference/telemetry.md b/developer-docs-site/docs/reference/telemetry.md deleted file mode 100644 index c635252771f16..0000000000000 --- a/developer-docs-site/docs/reference/telemetry.md +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: "Telemetry" -slug: "telemetry" ---- - -When you operate a node on an Aptos network, your node can be set to send telemetry data to Aptos Labs. You can disable telemetry at any point. If telemetry remains enabled, Aptos node binary will send telemetry data in the background. - -The Aptos node binary running on your node collects telemetry data such as software version, operating system information and the IP address of your node. This telemetry data is used to enhance the decentralization of the network. - -:::tip No personal information is collected -The Aptos node binary does **not** collect personal information such as usernames or email addresses. -::: - -## Metrics collected - -### Core metrics - -- Core metrics: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/core_metrics.rs#L14-L29](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/core_metrics.rs#L14-L29). - -### Node information - -The public IP address of the node and core metrics, including node type, synced version and number of network connections. 
- -- **Node configuration as a mapping of string key to JSON map**: [https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs#L63-L97](https://github.com/aptos-labs/aptos-core/blob/main/config/src/config/mod.rs#L63-L97). - -### CLI telemetry - -The commands and subcommands run by the Aptos CLI tool. - -- **CLI metrics**: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/cli_metrics.rs#L12-L15](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/cli_metrics.rs#L12-L15). -- **Build information**: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-build-info/src/lib.rs#L8-L20](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-build-info/src/lib.rs#L8-L20). - -### Network metrics - -- **Network metrics**: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/network_metrics.rs#L12-L17](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/network_metrics.rs#L12-L17). - -### Build information - -Rust build information including the versions of Rust, cargo, build target architecture and the build tag. - -- **Build information**: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-build-info/src/lib.rs#L8-L20](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-build-info/src/lib.rs#L8-L20) - -### System information - -System information including operating system information (including versions), hardware information and resource utilization (including CPU, memory and disk). - -- **System information**: [https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/system_information.rs#L14-L32](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos-telemetry/src/system_information.rs#L14-L32). - -### Others - -- **Metrics**: All the [Prometheus](https://prometheus.io/) metrics that are collected within the node. 
-- **Logs**: Logs of warn-level and higher level, with the ability to collect up to debug logs. - -## Disabling telemetry - -On macOS and Linux, you can set the following environment variables to control the metrics sent by your node. For example, to disable all telemetry, set the `APTOS_DISABLE_TELEMETRY` environment variable to `true` as shown below: - -```bash -export APTOS_DISABLE_TELEMETRY=true -``` - -The above example only disables telemetry for a single session in the current terminal where you ran the above command. To disable it permanently on your node, include it in your startup profile, as below: - -```bash -echo "export APTOS_DISABLE_TELEMETRY=true" >> ~/.profile -source ~/.profile -``` - -:::tip All telemetry is ON by default. -All the below variables are set by default to `false`, i.e., sending of these telemetry metrics is enabled. Set them to `true` to disable telemetry. -::: - -- `APTOS_DISABLE_TELEMETRY`: This disables all telemetry emission from the node including sending to the GA4 service. -- `APTOS_FORCE_ENABLE_TELEMETRY`: This overrides the chain ID check and forces the node to send telemetry regardless of whether remote service accepts or not. -- `APTOS_DISABLE_TELEMETRY_PUSH_METRICS`: This disables sending the [Prometheus](https://prometheus.io/) metrics. -- `APTOS_DISABLE_TELEMETRY_PUSH_LOGS`: This disables sending the logs. -- `APTOS_DISBALE_TELEMETRY_PUSH_EVENTS`: This disables sending the custom events. -- `APTOS_DISABLE_LOG_ENV_POLLING`: This disables the dynamic ability to send verbose logs. -- `APTOS_DISABLE_PROMETHEUS_NODE_METRICS`: This disables sending the node resource metrics such as system CPU, memory, etc. 
diff --git a/developer-docs-site/docs/releases/index.md b/developer-docs-site/docs/releases/index.md deleted file mode 100644 index 9f16256966ffb..0000000000000 --- a/developer-docs-site/docs/releases/index.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: "Latest Aptos Releases" -slug: "aptos-releases" ---- - -# Latest Aptos Releases - -Each Aptos release can be found on the GitHub site for [Aptos-core releases](https://github.com/aptos-labs/aptos-core/releases). Each release is mirrored by the following git branches: - -* [Latest Mainnet Release](https://github.com/aptos-labs/aptos-core/tree/mainnet) -* [Latest Testnet Release](https://github.com/aptos-labs/aptos-core/tree/testnet) -* [Latest Devnet Release](https://github.com/aptos-labs/aptos-core/tree/devnet) - -Aptos typically conducts multiple devnet releases for each testnet and mainnet release. Hence, devnet releases use commit hashes for tracking rather than version numbers. Testnet and mainnet releases usually have a one-to-one correlation, meaning each testnet release rolls into mainnet. - -Hot fixes are exceptions that may occur in mainnet to address urgent issues in production. See the [Aptos Release Process](https://github.com/aptos-labs/aptos-core/blob/main/RELEASE.md) for more details. - -## Update nodes - -If you are a node operator, [update your nodes with the new release](../nodes/full-node/update-fullnode-with-new-releases.md). - -## Subscribe to Release Announcements - -### Subscribe via GitHub -1. Go to the [aptos-labs/aptos-core](https://github.com/aptos-labs/aptos-core) repository. -2. Open the **Watch** menu and select **Custom**. -3. Select the **Releases** checkbox and click **Apply**. - -### Subscribe via [Aptos Discord](https://discord.gg/aptosnetwork) -Join the Aptos Discord server to interact with us and our community. We also post upcoming releases in these channels. 
- - * [#mainnet-release](https://discord.com/channels/945856774056083548/1042502400507916349) - * [#testnet-release](https://discord.com/channels/945856774056083548/1025614160555413545) - * [#devnet-release](https://discord.com/channels/945856774056083548/956692649430093904) - -### Subscribe via [Aptos Twitter](https://twitter.com/AptosRelease) -Follow @AptosRelease on Twitter to get the latest updates about our upcoming mainnet releases and be notified when it is time to update your node. -Every couple of days, @AptosRelease will tweet a countdown to remind you to update to the latest version. *Note: We do not post about hotfixes here!* - -## Aptos Release Process -To understand how we conduct releases, review the [Aptos Release Process](https://github.com/aptos-labs/aptos-core/blob/main/RELEASE.md). diff --git a/developer-docs-site/docs/sdks/index.md b/developer-docs-site/docs/sdks/index.md deleted file mode 100644 index 6dac31b2baa6a..0000000000000 --- a/developer-docs-site/docs/sdks/index.md +++ /dev/null @@ -1,19 +0,0 @@ ---- -title: "Use Aptos SDKs" -slug: "index" -hidden: false ---- - -# Use the Aptos SDKs - -Use these Aptos software development kits (SDKs), in combination with the [Aptos CLI](/tools/aptos-cli/use-cli/use-aptos-cli.md) for your development on the Aptos blockchain. - -- ### [TypeScript SDK](ts-sdk/index.md) - -- ### [Python SDK](python-sdk.md) - -- ### [Rust SDK](rust-sdk.md) - -- ### [Unity SDK](unity-sdk.md) - -To get started, [develop with the Aptos SDKs](../tutorials/index.md) following our tutorials. diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/account.md b/developer-docs-site/docs/sdks/new-ts-sdk/account.md deleted file mode 100644 index afa50e8438011..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/account.md +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: "Account" ---- - -# Account - -The SDK provides an `Account` class for creating and managing [accounts](../../concepts/accounts.md) on Aptos network. 
-
-Following [AIP-55](https://github.com/aptos-foundation/AIPs/blob/main/aips/aip-55.md) the SDK supports `Legacy` and `Unified` authentications. `Legacy` includes `ED25519` and `MultiED25519` and `Unified` includes `SingleSender` and `MultiSender` authenticators.
-
-- `SingleSender` supports any single signer authenticator (currently is `ED25519` and `Secp256k1`)
-- `MultiSender` supports any multi signers authenticator (Currently is `MultiED25519`)
-
-The `Account` class supports different static methods to generate and/or derive an account
-
-- `Account.generate()`
-- `Account.fromPrivateKey()`
-- `Account.fromPrivateKeyAndAddress()`
-- `Account.fromDerivationPath()`
-
-## Generate a new account
-
-To generate a new account (or a new key pair), the SDK provides a `generate()` static method on the `Account` class.
-
-Account generation supports all current Aptos supported key schemes, `Legacy Ed25519`, `Single Sender Ed25519` and `Single Sender Secp256k1`.
-
-```ts
-const account = Account.generate(); // defaults to Legacy Ed25519
-const account = Account.generate({ scheme: SigningSchemeInput.Secp256k1 }); // Single Sender Secp256k1
-const account = Account.generate({ scheme: SigningSchemeInput.Ed25519, legacy: false }); // Single Sender Ed25519
-```
-
-:::note
-Creating an account with the SDK creates it locally, to create the account on chain we should fund it.
-
-```ts
-const transaction = await aptos.fundAccount({ accountAddress: account.accountAddress, amount: 100 });
-```
-
-:::
-
-## Derive an account from private key
-
-The SDK supports deriving an account from a private key with `fromPrivateKey()` static method.
-This method uses a local calculation and therefore is used to derive an `Account` that has not had its authentication key rotated. 
- -```ts -// to derive an account with a legacy Ed25519 key scheme -const privateKey = new Ed25519PrivateKey(privateKeyBytes); -const account = Account.fromPrivateKey({ privateKey }); - -// to derive an account with a Single Sender Ed25519 key scheme -const privateKey = new Ed25519PrivateKey(privateKeyBytes); -const account = Account.fromPrivateKey({ privateKey, legacy: false }); - -// to derive an account with a Single Sender Secp256k1 key scheme -const privateKey = new Secp256k1PrivateKey(privateKeyBytes); -const account = Account.fromPrivateKey({ privateKey }); -``` - -## Derive an account from private key and address - -The SDK supports deriving an account from a private key and address with `fromPrivateKeyAndAddress()` static method. - -```ts -// to derive an account with a legacy Ed25519 key scheme -const privateKey = new Ed25519PrivateKey(privateKeyBytes); -const accountAddress = AccountAddress.from(address); -const account = Account.fromPrivateKeyAndAddress({ privateKey, address: accountAddress, legacy: true }); - -// to derive an account with a Single Sender Ed25519 key scheme -const privateKey = new Ed25519PrivateKey(privateKeyBytes); -const accountAddress = AccountAddress.from(address); -const account = Account.fromPrivateKeyAndAddress({ privateKey, address: accountAddress, legacy: false }); - -// to derive an account with a Single Sender Secp256k1 key scheme -const privateKey = new Secp256k1PrivateKey(privateKeyBytes); -const accountAddress = AccountAddress.from(address); -const account = Account.fromPrivateKeyAndAddress({ privateKey, address: accountAddress }); -``` - -## Derive an account from derivation path - -The SDK supports deriving an account from derivation path with `fromDerivationPath()` static method. 
-
-```ts
-// to derive an account with a legacy Ed25519 key scheme
-const { mnemonic, address, path } = wallet;
-const account = Account.fromDerivationPath({
-  path,
-  mnemonic,
-  scheme: SigningSchemeInput.Ed25519,
-});
-
-// to derive an account with a Single Sender Ed25519 key scheme
-const { mnemonic, address, path } = wallet;
-const account = Account.fromDerivationPath({
-  path,
-  mnemonic,
-  scheme: SigningSchemeInput.Ed25519,
-  legacy: false,
-});
-
-// to derive an account with a Single Sender Secp256k1 key scheme
-const { mnemonic, address, path } = wallet;
-const account = Account.fromDerivationPath({
-  path,
-  mnemonic,
-  scheme: SigningSchemeInput.Secp256k1Ecdsa,
-});
-```
diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/fetch-data-from-chain.md b/developer-docs-site/docs/sdks/new-ts-sdk/fetch-data-from-chain.md
deleted file mode 100644
index 334bb52c4fc26..0000000000000
--- a/developer-docs-site/docs/sdks/new-ts-sdk/fetch-data-from-chain.md
+++ /dev/null
@@ -1,106 +0,0 @@
----
-title: "Fetch data from chain"
----
-
-# TS SDK Fetch data from chain
-
-Once we created a new [Aptos instance](./sdk-configuration.md), we get access to all the sdk functionality. We now can query the chain for data.
-
-The SDK provides built in queries to easily query the chain with most used or popular queries. The SDK resolves those queries to Aptos [fullnode](https://fullnode.mainnet.aptoslabs.com/v1/spec#/) or [Indexer](https://cloud.hasura.io/public/graphiql?endpoint=https://indexer.mainnet.aptoslabs.com/v1/graphql) as needed and eases the burden on the developer to know and understand what service they need to query. 
- -```ts -const aptos = new Aptos(); - -const fund = await aptos.getAccountInfo({ accountAddress: "0x123" }); -const modules = await aptos.getAccountTransactions({ accountAddress: "0x123" }); -const tokens = await aptos.getAccountOwnedTokens({ accountAddress: "0x123" }); -``` - -## Queries with generics - -Some query responses do not provide the full response type as the SDK can't infer the actual type. For that we might want to provide a generic type for the response type, so we can access the response properties that are not included in the API type. - -For example, for the `getAccountResource` query we can define the `resource` to query but the SDK can't infer the response type and we can't have access to the response properties. - -For that we support generic response types for different queries. - -```ts -type Coin = { coin: { value: string } }; - -const resource = await aptos.getAccountResource({ - accountAddress: testAccount.accountAddress, - resourceType: "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", -}); - -// Now we have access to the response type property -const value = resource.coin.value; -``` - -## `options` input argument - -We can provide queries with an `options` input as query parameters. For those queries that support this option, an `option` input param is available - -```ts -const resource = await aptos.getAccountResource({ - accountAddress: alice.accountAddress, - resourceType: "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>", - options: { ledgerVersion: 12 }, -}); - -const tokens = await aptos.getAccountOwnedTokens({ - accountAddress: alice.accountAddress, - options: { - tokenStandard: "v2", - pagination: { offset: 0, limit: 10 }, - orderBy: [{ last_transaction_version: "desc" }], - }, -}); -``` - -## Wait for Indexer to sync up - -Sometimes we use Indexer service to fetch data, this is because we can not get complex data direct from fullnode or some queries are not supported with the fullnode API. 
-Since Indexer indexes the chain, it might take it some time to catch up with the latest ledger version and we can end up not getting the real time data. - -For that, the SDK supports an optional input argument `minimumLedgerVersion`. We can pass a ledger version to sync up to, before querying. -If no version is provided, the SDK will not wait for Indexer to sync up. - -```ts -const tokens = await aptos.getAccountOwnedTokens({ - accountAddress: alice.accountAddress, - minimumLedgerVersion: 1234, -}); -``` - -To get the latest ledger version we can - -1. Query for the ledger info - -```ts -const ledgerInfo = await aptos.getLedgerInfo(); - -const ledgerVersion = ledgerInfo.ledger_version; -``` - -2. If we just committed a transaction with the SDK, we can use the `waitForTransaction` method, that would return us a `CommittedTransactionResponse` that holds the latest ledger version - -```ts -const response = await aptos.waitForTransaction({ transactionHash: pendingTransaction.hash }); - -const tokens = await aptos.getAccountOwnedTokens({ - accountAddress: alice.accountAddress, - minimumLedgerVersion: BigInt(response.version), -}); -``` - -## Use namespace - -The `Aptos` class holds different namespaces related to the query operation we seek to do. For example, all `account` related queries are under the `aptos.account` namespace. -Once we initiate the `Aptos` class, all namespaces will be available for us with autocomplete along with all the possible API functions. - -Though we don't need to specify the namespace when making a query, it can be beneficial while developing. 
- -```ts -const aptos = new Aptos() -aptos.< list of available API functions and namespaces > -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/http-client.md b/developer-docs-site/docs/sdks/new-ts-sdk/http-client.md deleted file mode 100644 index f2853607b3254..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/http-client.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: "HTTP Client" ---- - -# TS SDK HTTP Client - -## Default HTTP Client - -The SDK uses [@aptos-labs/aptos-client](https://www.npmjs.com/package/@aptos-labs/aptos-client) library with the ability to modify some request configurations like AUTH_TOKEN, HEADERS, etc. - -The `@aptos-labs/aptos-client` package supports `http2` protocol and implements 2 clients environment based: - -- **axios** - To use in a browser environment (in a browser env it is up to the browser and the server to negotiate http2 connection) -- **got** - To use in a node environment (to support http2 in node environment, still the server must support http2 also) - -## Custom HTTP Client - -Sometimes developers want to set custom configurations or use a specific http client for queries. - -The SDK supports a custom client configuration as a function with this signature: - -```ts -(requestOptions: ClientRequest): Promise> -``` - -:::note -Both `ClientRequest` and `ClientResponse` are types defined in the SDK. -::: - -```ts -async function customClient(requestOptions: ClientRequest): Promise> { - .... 
-} - -const config = new AptosConfig({ client: { provider: customClient } }); -const aptos = new Aptos(config); -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/index.md b/developer-docs-site/docs/sdks/new-ts-sdk/index.md deleted file mode 100644 index 85eaea954cffe..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/index.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -title: "TypeScript Index" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import ThemedImage from '@theme/ThemedImage'; - -# Aptos TypeScript SDK - - - -:::note -This documentation is for the new TypeScript SDK [@aptos-labs/ts-sdk](https://github.com/aptos-labs/aptos-ts-sdk). You can find the documentation for the legacy SDK (aka `aptos`) [here](../ts-sdk/index.md) - -Looking to migrate to the **new TypeScript SDK**? check out the [migration guide](./migration-guide.md) -::: - -## Overview - -Aptos provides a fully supported TypeScript SDK with the source code in the [aptos-ts-sdk GitHub](https://github.com/aptos-labs/aptos-ts-sdk) repository. -The Aptos TypeScript SDK provides a convenient way to interact with the Aptos blockchain using TypeScript. It offers a set of utility functions, classes, and types to simplify the integration process and enhance developer productivity. - -- **Developer experience** Strongly typed APIs and Interfaces, autocomplete, comprehensive documentation. 
-- **Stability** Test suite runs against Aptos fullnode and indexer with a local network -- **Transaction Builder** Intuitive and simplified transaction builder flow -- **Serialization/deserialization support** Full nested serialization/deserialization support and Move sub-classes to easily serialize and deserialize Move types - -## Installation - - - - -```bash - pnpm i @aptos-labs/ts-sdk -``` - - - - -```bash - npm i @aptos-labs/ts-sdk -``` - - - - -```bash - yarn add @aptos-labs/ts-sdk -``` - - - - -```bash - bun i @aptos-labs/ts-sdk -``` - - - - -## Quick Start - -### Set up Aptos - -```ts -const aptos = new Aptos(); // default to devnet - -// with custom configuration -const aptosConfig = new AptosConfig({ network: Network.TESTNET }); -const aptos = new Aptos(aptosConfig); -``` - -### Fetch data from chain - -```ts -const ledgerInfo = await aptos.getLedgerInfo(); -const modules = await aptos.getAccountModules({ accountAddress: "0x123" }); -const tokens = await aptos.getAccountOwnedTokens({ accountAddress: "0x123" }); -``` - -### Transfer APT coin transaction - -```ts -const transaction = await aptos.transferCoinTransaction({ - sender: alice, - recipient: bob.accountAddress, - amount: 100, -}); -const pendingTransaction = await aptos.signAndSubmitTransaction({ signer: alice, transaction }); -``` - -### Build and submit transaction - -```ts -// generate a new account key pair -const alice: Account = Account.generate(); - -// create the account on chain -await aptos.fundAccount({ accountAddress: alice.accountAddress, amount: 1000 }); - -// submit transaction to transfer APT coin from Alice to Bob -const bobAddress = "0xb0b"; - -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - -// using sign and submit separately -const senderAuthenticator = aptos.sign.transaction({ signer: alice, 
transaction }); -const pendingTransaction = await aptos.submit.transaction({ transaction, senderAuthenticator }); - -// using signAndSubmit combined -const pendingTransaction = await aptos.signAndSubmitTransaction({ signer: alice, transaction }); -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/migration-guide.md b/developer-docs-site/docs/sdks/new-ts-sdk/migration-guide.md deleted file mode 100644 index 5953c18ddb167..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/migration-guide.md +++ /dev/null @@ -1,195 +0,0 @@ ---- -title: "Migration Guide" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# TS SDK Migration Guide - -If you are coming from an earlier version `1.x.x` of `aptos`, you will need to make the following updates. - -:::note -In this guide we only mention the API differences and updates you would need to do and excluding new features implementation -::: - -## Install the SDK - -The TypeScript SDK V2 is under a new [github repo](https://github.com/aptos-labs/aptos-ts-sdk) and with a new package name - `@aptos-labs/ts-sdk` - - - - -```bash - pnpm i @aptos-labs/ts-sdk -``` - - - - -```bash - npm i @aptos-labs/ts-sdk -``` - - - - -```bash - yarn add @aptos-labs/ts-sdk -``` - - - - -```bash - bun i @aptos-labs/ts-sdk -``` - - - - -## SDK usage and query the Aptos chain - -Remove all `<*>Client` modules (i.e `AptosClient`, `FaucetClient`, `CoinClient`, etc) and replace with an `Aptos` entry point class - -**V1** - -```ts -const faucetClient = new FaucetClient(NODE_URL, FAUCET_URL); -const aptosClient = new AptosClient(NODE_URL); -const indexerClient = new IndexerClient(INDEXER_URL); -const tokenClient = new TokenClient(aptosClient); -``` - -**V2** -:::tip -Read more about it [here](./sdk-configuration.md) -::: - -```ts -const aptos = new Aptos(); - -// make queries -const fund = await aptos.fundAccount({ accountAddress: "0x123", amount: 100 }); -const modules = await aptos.getAccountModules({ 
accountAddress: "0x123" }); -const tokens = await aptos.getAccountOwnedTokens({ accountAddress: "0x123" }); -``` - -## Configuration class - -Introduce `AptosConfig` class that holds the config information for the SDK. Once define it we can pass and use it with the `Aptos` class - -```ts -const aptosConfig = new AptosConfig({ network: Network.TESTNET }); // default to devnet -const aptos = new Aptos(config); -``` - -## Transaction Builder Flow - -Removed all separate transaction functions in favor of a more simplified and friendlier transaction builder flow - -**V1** - -```ts -const aptosClient = new AptosClient(NODE_URL); - -// bcs serialized arguments payload -const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction( - TxnBuilderTypes.EntryFunction.natural( - "0x1::aptos_account", - "transfer", - [], - [bcsToBytes(TxnBuilderTypes.AccountAddress.fromHex(receiver.address()))], - ), -); -// generate a raw transaction -const transaction = await client.generateRawTransaction(sender.address(), entryFunctionPayload); - -// non-serialized arguments payload -const payload: Gen.TransactionPayload = { - type: "entry_function_payload", - function: "0x1::aptos_account::transfer", - type_arguments: [], - arguments: [account2.address().hex(), 100000], -}; -// generate a raw transaction -const transaction = await client.generateTransaction(account1.address(), payload); - -// sign transaction -const signedTransaction = AptosClient.generateBCSTransaction(sender, transaction); -// submit transaction -const txn = await client.submitSignedBCSTransaction(signedTransaction); -``` - -**V2** -:::tip -Read more about it [here](./transaction-builder.md) -::: - -```ts -const aptos = new Aptos(); - -// non-serialized arguments transaction -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - 
-// bcs serialized arguments transaction -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - data: { - function: "0x1::coin::transfer", - typeArguments: [parseTypeTag("0x1::aptos_coin::AptosCoin")], - functionArguments: [bobAddress, new U64(100)], - }, -}); -// sign transaction -const senderAuthenticator = aptos.sign.transaction({ signer: alice, transaction }); -// submit transaction -const committedTransaction = await aptos.submit.transaction({ transaction, senderAuthenticator }); -``` - -## Account - -Rename `AptosAccount` to `Account` and use static methods to generate / derive an account - -**V1** - -```ts -// generate a new account (or key pair) OR derive from private key OR derive from private key and address -const account = new AptosAccount(); // supports only Legacy Ed25519 - -// derive account from derivation path -const account = AptosAccount.fromDerivePath(..) -``` - -**V2** - -:::tip -Read more about it [here](./account.md) -::: - -```ts -// generate a new account (or key pair) -const account = Account.generate(); // defaults to Legacy Ed25519 -const account = Account.generate({ scheme: SigningSchemeInput.Secp256k1 }); // Single Sender Secp256k1 -const account = Account.generate({ scheme: SigningSchemeInput.Ed25519, legacy: false }); // Single Sender Ed25519 - -// derive account from private key -const account = Account.fromPrivateKey({ privateKey }); - -// derive account from private key and address -const account = Account.fromPrivateKeyAndAddress({ privateKey, address: accountAddress }); - -// derive account from derivation path -const account = Account.fromDerivationPath({ - path, - mnemonic, - scheme: SigningSchemeInput.Ed25519, -}); -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/move-types.md b/developer-docs-site/docs/sdks/new-ts-sdk/move-types.md deleted file mode 100644 index e59eea66a4213..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/move-types.md +++ /dev/null @@ -1,55 +0,0 @@ 
---- -title: "Move Types" ---- - -# TS SDK Move Types - -When developing on Aptos, and specifically working with the SDK, developers often need to handle Move types serialization and deserialization. Whether is to construct a transaction payload, build a raw transaction or read BCS data. - -The SDK provides a convenient Move sub-classes to easily interact with move types to perform serialization or deserialization operations. -Each class has a `serialize`, `serializeForEntryFunction` and `serializeForScriptFunction` methods and a `deserialize` static class. - -In addition, for complex types like `Vector` the SDK supports nested serialization and deserialization. - -## Move primitive types - -Classes to handle Move primitive types: - -- U8 -- U16 -- U32 -- U64 -- U128 -- U256 -- Bool -- AccountAddress - -```ts -const serializer = new Serializer(); - -const u8 = new U8(1); -u8.serialize(serializer); -u8.serializeForEntryFunction(serializer); -u8.serializeForScriptFunction(serializer); - -const deserializer = new Deserializer(); -U8.deserialize(deserializer); -``` - -## Move struct types - -- MoveVector -- MoveString -- MoveOption - -```ts -const serializer = new Serializer(); - -const moveString = new MoveString("hello world"); -moveString.serialize(serializer); -moveString.serializeForEntryFunction(serializer); -moveString.serializeForScriptFunction(serializer); - -const deserializer = new Deserializer(); -MoveString.deserialize(deserializer); -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/sdk-configuration.md b/developer-docs-site/docs/sdks/new-ts-sdk/sdk-configuration.md deleted file mode 100644 index 51b778a0ced3f..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/sdk-configuration.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: "SDK Configuration" ---- - -# TS SDK Configuration - -## `Aptos` class - -The first step to interact with the Aptos chain using the SDK it to intansiate an `Aptos` class. 
This class is the main entry point into Aptos's APIs. - -```ts -const aptos = new Aptos(); -``` - -## `AptosConfig` class - -Sometimes you might want to use custom configurations when interacting with the `Aptos` chain. For that we have `AptosConfig` class that holds the config information for the SDK client instance. - -```ts -const aptosConfig = new AptosConfig({...}) -``` - -## Available configuration - -```ts -/** The Network that this SDK is associated with. Defaults to DEVNET */ -readonly network: Network; - -/** - * The client instance the SDK uses. Defaults to `@aptos-labs/aptos-client` - */ -readonly client: Client; - -/** - * The optional hardcoded fullnode URL to send requests to instead of using the network - */ -readonly fullnode?: string; - -/** - * The optional hardcoded faucet URL to send requests to instead of using the network - */ -readonly faucet?: string; - -/** - * The optional hardcoded indexer URL to send requests to instead of using the network - */ -readonly indexer?: string; - -/** - * A configuration object we can pass with the request to the server. - */ -readonly clientConfig?: ClientConfig; - -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/transaction-builder.md b/developer-docs-site/docs/sdks/new-ts-sdk/transaction-builder.md deleted file mode 100644 index c13192ac503b4..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/transaction-builder.md +++ /dev/null @@ -1,235 +0,0 @@ ---- -title: "Transaction Builder" ---- - -# TS SDK Transaction Builder - -The SDK provides a simplified and meaningful transaction builder flow to handles the transaction creation lifecycle. - -The transaction builder is separated to different namespaces for each transaction step in the transaction submission flow. 
-Each namespace/step can be accessed by initiating the [Aptos class](./sdk-configuration.md) - -- **build** - Build a raw transaction that can be signed and then submitted to chain -- **simulate** - Simulate a transaction before signing and submitting to chain -- **sign** - Sign a raw transaction to later submit to chain -- **submit** - Submit a transaction to chain - -Each step provides supports to all the different transaction types Aptos supports - - -- **simple transaction** - Single signer -- **complex transaction** - Sponsor and multi agent - -## Submit transaction - -### Simple transaction - -```ts -// build a transaction -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - -// using sign and submit separately -const senderAuthenticator = aptos.sign.transaction({ signer: alice, transaction }); -const committedTransaction = await aptos.submit.transaction({ transaction, senderAuthenticator }); - -// using signAndSubmit combined -const committedTransaction = await aptos.signAndSubmitTransaction({ signer: alice, transaction }); -``` - -### Complex transaction - Multi agent - -```ts -// build a transaction -const transaction = await aptos.build.multiAgentTransaction({ - sender: alice.accountAddress, - secondarySignerAddresses: [secondarySignerAccount.accountAddress], - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - -// sign transaction -const senderAuthenticator = aptos.sign.transaction({ signer: alice, transaction }); -const secondarySignerAuthenticator = aptos.sign.transaction({ signer: secondarySignerAccount, transaction }); -// submit transaction -const committedTransaction = await aptos.submit.multiAgentTransaction({ - transaction, - senderAuthenticator, - additionalSignersAuthenticators: 
[secondarySignerAuthenticator], -}); -``` - -### Complex transaction - Simple transaction with Sponsor transaction - -```ts -// build a transaction -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - withFeePayer: true, - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - -// sign transaction -const senderAuthenticator = aptos.sign.transaction({ signer: alice, transaction }); -const feePayerSignerAuthenticator = aptos.sign.transactionAsFeePayer({ - signer: feePayerAccount, - transaction, -}); -// submit transaction -const committedTransaction = await aptos.submit.transaction({ - transaction, - senderAuthenticator, - feePayerAuthenticator: feePayerSignerAuthenticator, -}); -``` - -### Complex transaction - Multi agent with Sponsor transaction - -```ts -// build a transaction -const transaction = await aptos.build.multiAgentTransaction({ - sender: alice.accountAddress, - secondarySignerAddresses: [secondarySignerAccount.accountAddress], - withFeePayer: true, - data: { - function: "0x1::coin::transfer", - typeArguments: ["0x1::aptos_coin::AptosCoin"], - functionArguments: [bobAddress, 100], - }, -}); - -// sign transaction -const senderAuthenticator = aptos.sign.transaction({ signer: alice, transaction }); -const secondarySignerAuthenticator = aptos.sign.transaction({ signer: secondarySignerAccount, transaction }); -const feePayerSignerAuthenticator = aptos.sign.transactionAsFeePayer({ - signer: feePayerAccount, - transaction, -}); -// submit transaction -const committedTransaction = await aptos.submit.multiAgentTransaction({ - transaction, - senderAuthenticator, - additionalSignersAuthenticators: [secondarySignerAuthenticator], - feePayerAuthenticator: feePayerSignerAuthenticator, -}); -``` - -## Simulate transaction - -### Simple transaction - -```ts -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - 
data: { - function: "0x1::coin::transfer", - functionArguments: [bobAddress, 100], - }, -}); -const [userTransactionResponse] = await aptos.simulate.transaction({ - signerPublicKey: alice.publicKey, - transaction, -}); -``` - -### Complex transaction - Multi agent - -```ts -const transaction = await aptos.build.multiAgentTransaction({ - sender: alice.accountAddress, - secondarySignerAddresses: [secondarySignerAccount.accountAddress], - data: { - function: "0x1::coin::transfer", - functionArguments: [bobAddress, 100], - }, -}); -const [userTransactionResponse] = await aptos.simulate.multiAgentTransaction({ - signerPublicKey: alice.publicKey, - transaction, - secondarySignersPublicKeys: [secondarySignerAccount.publicKey], -}); -``` - -### Complex transaction - Simple transaction with Sponsor transaction - -```ts -const transaction = await aptos.build.transaction({ - sender: alice.accountAddress, - withFeePayer: true, - data: { - function: "0x1::coin::transfer", - functionArguments: [bobAddress, 100], - }, -}); -const [userTransactionResponse] = await aptos.simulate.transaction({ - signerPublicKey: alice.publicKey, - transaction, - feePayerPublicKey: feePayerAccount.publicKey, -}); -``` - -### Complex transaction - Multi agent with Sponsor transaction - -```ts -const transaction = await aptos.build.multiAgentTransaction({ - sender: alice.accountAddress, - secondarySignerAddresses: [secondarySignerAccount.accountAddress], - withFeePayer: true, - data: { - function: "0x1::coin::transfer", - functionArguments: [bobAddress, 100], - }, -}); -const [userTransactionResponse] = await aptos.simulate.multiAgentTransaction({ - signerPublicKey: alice.publicKey, - transaction, - secondarySignersPublicKeys: [secondarySignerAccount.publicKey], - feePayerPublicKey: feePayerAccount.publicKey, -}); -``` - -## Transaction Management - -The TypeScript SDK provides a transaction management layer to submit as many transaction for a single account as possible while respecting a high 
throughput. - -Read more about it [here](https://aptos.dev/guides/transaction-management) - -In the SDK, the transaction management layer implements 2 components - -- `AccountSequenceNumber` that handles and manages an account sequence number. -- `TransactionWorker` that provides a simple framework for receiving payloads to be processed - -To use and leverage the transaction management layer, we provide an array of payloads to the batch function that in turn passes it into the worker to process and generate transactions and submit it to chain. - -```ts -const aptos = new Aptos(); -const sender = Account.generate(); -await aptos.fundAccount({ accountAddress: sender.accountAddress, amount: 10000000000 }) -// recipients is an array of accounts -const recipients = [Account.generate(),Account.generate(),Account.generate()] - -// create payloads -const payloads: InputGenerateTransactionPayloadData[] = []; - -for (let i = 0; i < recipients.length; i += 1) { - const txn: InputGenerateTransactionPayloadData = { - function: "0x1::aptos_account::transfer", - functionArguments: [recipients[i].accountAddress, 10], - }; - payloads.push(txn); -} - -await aptos.batchTransactionsForSingleAccount({ sender, data: payloads }); -``` diff --git a/developer-docs-site/docs/sdks/new-ts-sdk/typescript.md b/developer-docs-site/docs/sdks/new-ts-sdk/typescript.md deleted file mode 100644 index b90f394837f34..0000000000000 --- a/developer-docs-site/docs/sdks/new-ts-sdk/typescript.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Typescript" ---- - -# TS SDK Typescript - -Aptos SDK is designed to be type-safe! Things to keep in mind: - -- Types currently require using TypeScript `v5.2.2` or greater. -- Changes to types in this repository are considered non-breaking and are usually released as patch semver changes (otherwise every type enhancement would be a major version!). 
-- It is highly recommended that you lock your `@aptos-labs/ts-sdk` package version to a specific patch release and upgrade with the expectation that types may be fixed or upgraded between any release. - -## Local types - -The SDK exports types on the top level and defines and holds all types locally and not using any external type generator excluding for Indexer GraphQL schema that even then the SDK customizes the generated types to be more user friendly and understandable. - -You can check the types the SDK supports and exports on the [typedoc site](https://aptos-labs.github.io/aptos-ts-sdk/) organized by SDK version diff --git a/developer-docs-site/docs/sdks/python-sdk.md b/developer-docs-site/docs/sdks/python-sdk.md deleted file mode 100644 index 0962cf2993673..0000000000000 --- a/developer-docs-site/docs/sdks/python-sdk.md +++ /dev/null @@ -1,45 +0,0 @@ ---- -title: "Python SDK" -slug: "python-sdk" ---- - -# Aptos Python SDK - -Aptos provides a lightly maintained official Python SDK. It is available on [PyPi](https://pypi.org/project/aptos-sdk/) with the source code in the [Aptos-core GitHub repository](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/python/sdk). Much of the functionality of the Python SDK mirrors the [Typescript SDK](./ts-sdk/index.md). The primary purpose of the Python SDK is to help Python developers to quickly become familiar with Aptos and as an accompaniment to Aptos tutorials. - -## Installing Python SDK - -The Python SDK can either be installed via `pip`, from source, or embedded: - -### Install with pip - -To install via `pip`: - -```bash -pip3 install aptos-sdk -``` - -The `aptos-sdk` will be installed in the local site packages directory. For example, on macOS, you will find the `aptos-sdk` in the `~/Library/Python/3.8/lib/python/site-packages/aptos_sdk` directory. 
- -### Install from the source code - -To install from source: - -```bash -git clone https://github.com/aptos-labs/aptos-core -cd aptos-core/ecosystem/python/sdk -python3 setup.py install --user -``` - -### Install by embedding - -To embed the Python SDK into your existing Python project: - -``` -cd /path/to/python/project -cp -r /path/to/aptos-core/ecosystem/python/sdk/aptos-sdk aptos-sdk -``` - -## Using the Python SDK - -See the [Developer Tutorials](../tutorials/index.md) for code examples showing how to use the Python SDK. \ No newline at end of file diff --git a/developer-docs-site/docs/sdks/rust-sdk.md b/developer-docs-site/docs/sdks/rust-sdk.md deleted file mode 100644 index 7b4a06a82706b..0000000000000 --- a/developer-docs-site/docs/sdks/rust-sdk.md +++ /dev/null @@ -1,30 +0,0 @@ ---- -title: "Rust SDK" -slug: "rust-sdk" ---- - -# Aptos Rust SDK - -## Installing Rust SDK - -Aptos provides an official Rust SDK in the [Aptos-core GitHub](https://github.com/aptos-labs/aptos-core/tree/main/sdk) repository. To use the Rust SDK, add the following dependency and patches on the git repo directly in your `Cargo.toml`, like this: - -```toml -[dependencies] -aptos-sdk = { git = "https://github.com/aptos-labs/aptos-core", branch = "devnet" } - -[patch.crates-io] -merlin = { git = "https://github.com/aptos-labs/merlin" } -``` - -You must also create a `.cargo/config.toml` file with this content: -```toml -[build] -rustflags = ["--cfg", "tokio_unstable"] -``` - -The source code for the official Rust SDK is available in the [aptos-core GitHub repository](https://github.com/aptos-labs/aptos-core/tree/main/sdk). - -## Using Rust SDK - -See the [Developer Tutorials](../tutorials/index.md) for code examples showing how to use the Rust SDK. 
diff --git a/developer-docs-site/docs/sdks/ts-sdk/aptos-client.md b/developer-docs-site/docs/sdks/ts-sdk/aptos-client.md deleted file mode 100644 index 5321e6b5128de..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/aptos-client.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: "AptosClient Class" -slug: "typescript-sdk-aptos-client-class" ---- - -The [AptosClient](https://aptos-labs.github.io/ts-sdk-doc/classes/AptosClient.html) class is a component of the Aptos TypeScript SDK that enables developers to interact with the blockchain network through the use of REST APIs generated from an OpenAPI document. The [OpenAPI specification](https://spec.openapis.org/oas/v3.0.3) helps build and document RESTful APIs by providing a standard format for describing the structure of an API, including the available endpoints, methods, input and output parameters. - -In addition, the `AptosClient` component supports submitting transactions in BCS format, which prepares and signs the raw transactions on the client-side. This method leverages the BCS Library or Transaction Builder for constructing the transaction payloads. - -## Usage - -To use the `AptosClient` class, you will need to create an instance of `AptosClient` and call the desired API method. The `AptosClient` object will handle the HTTP requests and responses and return the result to your application. - -## Configuration - -Before using the `AptosClient` class, you will need to configure it with the necessary parameters. These parameters may include the network endpoint URL, custom configuration, and any other required settings. You can configure the `AptosClient` class by passing in the necessary parameters when you initialize the client object. 
- -## Initialization - -Here is an example of how to initialize an `AptosClient`: - -```ts -import { AptosClient } from "aptos"; - -const client = new AptosClient("https://fullnode.testnet.aptoslabs.com"); -``` - -## Making API fetch calls - -To make an API call, you will need to call the appropriate method on the `AptosClient` object. The method name and parameters will depend on the specific API you are using. Here is an example: - -```ts -const accountResources = await client.getAccountResources("0x123"); -``` - -In this example, we are using the `getAccountResources()` method to retrieve the resources of an account with the address `0x123`. - -## Submit transaction to chain - -To submit a transaction to the blockchain, you will need to build a transaction payload to be submitted. Here is an example: - -```ts -const alice = new AptosAccount(); - -const payload: Types.EntryFunctionPayload = { - function: "0x123::todolist::create_task", - type_arguments: [], - arguments: ["read aptos.dev"], -}; - -const rawTxn = await client.generateTransaction(alice.address(), payload); -const bcsTxn = AptosClient.generateBCSTransaction(alice, rawTxn); -const transactionRes = await client.submitSignedBCSTransaction(bcsTxn); -``` - -Transaction payload contents: -`function` – This must be a fully qualified function name and composed of `module address`, `module name` and `function name` separated by `::`. -`type_arguments` – This is for the case a Move function expects a generic type argument. -`arguments` – The arguments the function expects. - -:::tip -You can use the `AptosClient` class directly or the [Provider](./sdk-client-layer.md) class (preferred). 
-::: diff --git a/developer-docs-site/docs/sdks/ts-sdk/index.md b/developer-docs-site/docs/sdks/ts-sdk/index.md deleted file mode 100644 index 9f66f8a89d0b8..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/index.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -title: "TypeScript Index" -slug: "index" -hidden: false ---- - -# Aptos TypeScript SDK - -:::note -This documentation is for the **legacy TypeScript SDK**, aka `aptos`. For a more robust and better SDK support, we recommend upgrading to the **new TypeScript SDK** [@aptos-labs/ts-sdk](https://github.com/aptos-labs/aptos-ts-sdk). Take a look at the [documentation](../new-ts-sdk/index.md) and the [migration guide](../new-ts-sdk/migration-guide.md) -::: - -Aptos provides a fully supported TypeScript SDK with the source code in the [Aptos-core GitHub](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk) repository. Much of the functionality of the TypeScript SDK can be found in the [Rust](../rust-sdk.md) and [Python](../python-sdk.md) SDKs. Nevertheless, Aptos strongly encourages you to use the TypeScript SDK for app development whenever possible. - -## Installing the TypeScript SDK - -1. Make sure you [downloaded the latest precompiled binary for the Aptos CLI](../../tools/aptos-cli/install-cli/index.md#download-precompiled-binary). -2. On a terminal run the below command to install the TypeScript SDK from [npmjs](https://www.npmjs.com/package/aptos): - - ```bash - npm i aptos - ``` - - or - - ```bash - yarn add aptos - ``` - - or - - ```bash - pnpm add aptos - ``` - - :::tip - The above command installs the TS SDK and should not be confused as installing the Aptos CLI. - ::: - -## Using the TypeScript SDK - -See the [Developer Tutorials](../../tutorials/index.md) for code examples showing how to use the Typescript SDK. - -## TypeScript SDK Architecture - -See the [TypeScript SDK Architecture](./typescript-sdk-overview.md) for the components that make up the TypeScript SDK. 
- -## Additional information - -- ### [TypeScript SDK Source code](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk) -- ### [TypeScript SDK at NPM](https://www.npmjs.com/package/aptos) -- ### [TypeScript SDK Reference](https://aptos-labs.github.io/ts-sdk-doc/) -- ### [TypeScript SDK Reference Source](https://github.com/aptos-labs/ts-sdk-doc) diff --git a/developer-docs-site/docs/sdks/ts-sdk/indexer-client.md b/developer-docs-site/docs/sdks/ts-sdk/indexer-client.md deleted file mode 100644 index d15c31312da0d..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/indexer-client.md +++ /dev/null @@ -1,111 +0,0 @@ ---- -title: "IndexerClient Class" -slug: "typescript-sdk-indexer-client-class" ---- - -The [IndexerClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/providers/indexer.ts) is responsible for handling the communication between the client-side application and the blockchain network. It uses the [Hasura framework](https://hasura.io/) to generate a set of [GraphQL queries](https://cloud.hasura.io/public/graphiql?endpoint=https://indexer.mainnet.aptoslabs.com/v1/graphql) that can be used to retrieve data from the blockchain. The queries are optimized for performance and can retrieve data in real-time. - -## Usage - -To use the `IndexerClient` class, you will need to create an instance of `IndexerClient` and call the desired API method. The `IndexerClient` object will handle the HTTP requests and responses and return the result to your application. - -## Configuration - -Before using the `IndexerClient` class, you will need to configure it with the necessary parameters. These parameters may include the Hasura endpoint URL, custom configuration, and any other required settings. You can configure the `IndexerClient` class by passing in the necessary parameters when you initialize the client object. 
- -## Initialization - -To initialize the `IndexerClient` class, you will need to pass in the necessary configuration parameters. Here is an example: - -```ts -import { IndexerClient } from "aptos"; - -const client = new IndexerClient("https://indexer-testnet.staging.gcp.aptosdev.com/v1/graphql"); -``` - -## Make API fetch calls - -To make an API call, you will need to call the appropriate method on the `IndexerClient` object. The method name and parameters will depend on the specific API you are using. Here is an example: - -```ts -const accountNFTs = await client.getAccountNFTs("0x123"); -``` - -In this example, we are using the `getAccountNFTs()` method to retrieve the NFT of an account with the address `0x123`. - -## Use custom queries - -The TypeScript SDK provides frequently used queries by different users and/or apps and makes sure the queries are well structured to retrieve the current response. - -With that being said, one can structure custom queries and use the SDK to query the Aptos Indexer API. For that, the SDK exports a `queryIndexer()` method that accepts a `GraphqlQuery` type argument. The `GraphqlQuery` type has a `query` field of type `string` and an optional `variable` field of an `object` type. - -Here is the `GraphqlQuery` type definition. - -```ts -type GraphqlQuery = { - query: string; - variables?: {}; -}; -``` - -To use the `queryIndexer()` method, one should pass the GraphQL query. For example: - -```ts -const query: string = `query getAccountTokensCount($owner_address: String) { - current_token_ownerships_aggregate(where: { owner_address: { _eq: $owner_address }, amount: { _gt: "0" } }) { - aggregate { - count - } - } -}`; -const variables = { owner_address: "0x123" }; -const graphqlQuery = { query, variables }; -const accountTokensCount = await client.queryIndexer(graphqlQuery); -``` - -:::tip -Be aware that it queries the network endpoint you passed in when initializing the `IndexerClient` class. 
-::: - -## Generate queries - -To generate an Indexer query that can be used within the SDK, we can write a GraphQL query (based on the [Indexer schema](https://cloud.hasura.io/public/graphiql?endpoint=https://indexer.mainnet.aptoslabs.com/v1/graphql)) and use the SDK to generate a TypeScript query. - -### Write an Indexer query - -All Indexer queries, which are basically GraphQL queries, live under the `src/indexer/queries/` folder. In this folder, we create a `.graphql` file for each query we want the SDK to support. For example, a `.graphql` file with a GraphQL query can be: - -```graphql -query getAccountTokensCount($owner_address: String) { - current_token_ownerships_aggregate(where: { owner_address: { _eq: $owner_address }, amount: { _gt: "0" } }) { - aggregate { - count - } - } -} -``` - -### Generate TypeScript queries - -Once we have created a `.graphql` file with a GraphQL query, we can generate TypeScript code based on that query so we can use it with the TypeScript SDK by running the following command: - -```cmd -pnpm run indexer-codegen -``` - -That command runs the `graphql-codegen` command that generates code from the Indexer GraphQL schema based on the SDK configuration file. - -### SDK GraphQL configuration file - -The TypeScript SDK uses a configuration file for [@graphql-codegen](https://the-guild.dev/graphql/codegen), a code generation tool for GraphQL. - -The SDK [configuration file](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/indexer/codegen.yml) defines how `@graphql-codegen` should generate TypeScript code from a GraphQL schema and queries. Let's break down some of the key elements of this file: - -- `schema` – Specifies the location of the GraphQL schema file that `@graphql-codegen` should use for code generation. In this case, it is using the Aptos Indexer `mainnet` schema. -- `documents` - Specifies the location of the GraphQL operation files that `@graphql-codegen` should use for code generation. 
In this case, it is using the `src/indexer/queries/` location (as mentioned in the previous section) and includes all files with the `.graphql` extension. -- `generates` – Defines the output files that `@graphql-codegen` should generate based on the schema and operations. In this case, it is generating the types, operations and queries. -- `plugins` – Specifies the plugins that `@graphql-codegen` should use for code generation. In this case, it is using the `typescript` plugin to generate TypeScript typings from the GraphQL schema, `typescript-operations` plugin to generate TypeScript typings for GraphQL operations and `typescript-graphql-request` plugin to generate function for making GraphQL requests . - -:::tip -You can use the `IndexerClient` class directly or the [Provider](./sdk-client-layer.md) class (preferred). -::: diff --git a/developer-docs-site/docs/sdks/ts-sdk/sdk-client-layer.md b/developer-docs-site/docs/sdks/ts-sdk/sdk-client-layer.md deleted file mode 100644 index 029c1534e2cf0..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/sdk-client-layer.md +++ /dev/null @@ -1,120 +0,0 @@ ---- -title: "API Client Layer" -slug: "typescript-sdk-client-layer" ---- - -The API client layer in the SDK provides a robust and reliable communication channel between the client-side application and the blockchain server. It is a component of the SDK that enables developers to interact with the network through the use of application programming interfaces (APIs). The client layer is responsible for making API calls to the network, sending and receiving data to and from the network, and handling any errors or exceptions that may occur during the process. - -The client layer is used to communicate with the Aptos REST API the Aptos Indexer API and handling of errors and exceptions. -In addition, the client layer component supports submitting transactions in BCS format, which prepares and signs the raw transactions on the client-side. 
This method leverages the BCS Library and Transaction Builder for constructing the transaction payloads. - -By following the instructions in this documentation, you should be able to configure and use the client layer in your project - -## Provider class - -The client layer exports a [Provider](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/providers/provider.ts) class that extends both the Aptos REST API and the Aptos Indexer API. -The `Provider` class acts as a mediator between the client-side application and the blockchain server, ensuring reliable and efficient communication. -This class provides a high-level interface for the application to interact with the blockchain server. The class is designed to be easy to use and understand, allowing developers to quickly integrate the SDK into their applications. - -The `Provider` class accepts: - -- `network` - network enum type `mainnet | testnet | devnet` indicates the network the app interacts with. -- `CustomEndpoints` of type `{fullnodeUrl: string, indexerUrl: string}` - this is to support devs who run their own nodes/indexer or to use local development against a local testnet. -- `Config` - an optional argument the AptosClient accepts. -- `doNotFixNodeUrl` - an optional argument the AptosClient accepts. - -## Initializing the Provider class - -To initialize the Provider class, you will need to pass in the necessary configuration parameters. Here is an example: - -```ts -import { Provider, Network } from "aptos"; - -const provider = new Provider(Network.TESTNET); -``` - -## Fetch data from chain - -To make an API call, you will need to call the appropriate method on the Provider class. The method name and parameters will depend on the specific API you are using. Here is an example: - -```ts -const account = await provider.getAccount("0x123"); -``` - -In this example, we are using the `getAccount()` method to retrieve information about an account with the address `0x123`. 
-
-## Submit Transaction to chain
-
-To submit a transaction to the Aptos network we should:
-
-1. Generate a raw transaction
-2. Sign the generated raw transaction
-3. Submit the signed transaction
-
-### Generate a Raw Transaction
-
-The TypeScript SDK provides 2 efficient ways to `generate a raw transaction` that can be signed and submitted to chain.
-
-#### Transaction Builder
-
-The `generateTransaction()` method accepts an `entry function payload` type and is available for entry function transaction submission. It uses the [TransactionBuilderRemoteABI](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderRemoteABI.html) to fetch the ABI from the blockchain, serializes the payload arguments based on the entry function argument types and generates and returns a raw transaction that can be signed and submitted to the blockchain.
-
-```ts
-const alice = new AptosAccount();
-
-const payload = {
-  function: "0x123::todolist::create_task",
-  type_arguments: [],
-  arguments: ["read aptos.dev"],
-};
-
-const rawTxn = await provider.generateTransaction(alice.address(), payload);
-```
-
-`function` – This must be a fully qualified function name and composed of `module address`, `module name` and `function name` separated by `::`.
-`type_arguments` – This is for the case a Move function expects a generic type argument.
-`arguments` – The arguments the function expects.
-
-:::tip
-To submit an entry function payload, using the Transaction Builder is simpler as the developer does not need to deal with BCS serialization.
-:::
-
-#### BCS Transaction
-
-The `generateRawTransaction()` method accepts any transaction payload type (entry, script, multisig) and expects the arguments passed in to be serialized. It then generates and returns a raw transaction that can be signed and submitted to chain.
-
-```ts
-const alice = new AptosAccount();
-
-const entryFunctionPayload = new TxnBuilderTypes.TransactionPayloadEntryFunction(
-  TxnBuilderTypes.EntryFunction.natural("0x123::todolist", "create_task", [], [bcsSerializeStr("read aptos.dev")]),
-);
-
-const rawTxn = await provider.generateRawTransaction(alice.address(), entryFunctionPayload);
-```
-
-For simplicity, the TypeScript SDK provides a method that can submit a BCS transaction in one call.
-
-```ts
-const rawTxn = await provider.generateSignSubmitTransaction(alice, entryFunctionPayload);
-```
-
-### Sign a Raw Transaction
-
-Once one has generated a raw transaction, they need to sign this transaction with their private key. The TypeScript SDK provides a method that accepts an `aptos account` and `a raw transaction` and signs it.
-
-```ts
-const signedTxn = AptosClient.generateBCSTransaction(alice, rawTxn);
-```
-
-### Submit transaction to blockchain
-
-Once a transaction has been signed, it is ready to be submitted to the blockchain. The TypeScript SDK provides a method that accepts the `signed transaction` and submits it to the Aptos network.
-
-```ts
-const transactionRes = await provider.submitSignedBCSTransaction(signedTxn);
-```
-
-## Learn more
-
-The Provider class extends both [AptosClient](./aptos-client.md) and [IndexerClient](./indexer-client.md) classes and gives the end user the option to simply create a Provider instance and call a method by hiding the underlying implementation. If, for any reason, you want to use AptosClient or IndexerClient directly without the Provider class, you are able to do so.
diff --git a/developer-docs-site/docs/sdks/ts-sdk/sdk-core-layer.md b/developer-docs-site/docs/sdks/ts-sdk/sdk-core-layer.md deleted file mode 100644 index 03ebc9a67f045..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/sdk-core-layer.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: "Core Layer" -slug: "typescript-sdk-core-layer" ---- - -The core SDK layer exposes the functionalities needed by most applications: - -- Key generation -- Transaction signing and submission -- Transaction status querying -- Information retrieval techniques - -## Transaction Builder - -The Aptos TypeScript SDK exposes five transaction builder classes: - -- [TransactionBuilder](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilder.html) that takes in a Signing Message (serialized raw transaction) and returns a signature. -- [TransactionBuilderEd25519](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderEd25519.html) extends the TransactionBuilder class and provides a signing method for raw transactions with a single public key. -- [TransactionBuilderMultiEd25519](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderMultiEd25519.html) extends the TransactionBuilder class and provides a signing method for signing a raw transaction with a multisignature public key. -- [TransactionBuilderABI](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderABI.html) builds raw transactions based on ABI. -- [TransactionBuilderRemoteABI](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderRemoteABI.html) downloads JSON ABIs from the fullnodes. It then translates the JSON ABIs to the format accepted by TransactionBuilderABI. - -The Transaction Builder contains the TypeScript types for constructing the transaction payloads. The Transaction Builder within the TypeScript SDK supports the following transaction payloads: - -1. Entry Function -2. Script -3. 
MultiSig Transaction
-
-### Generate transaction
-
-The TypeScript SDK provides 2 efficient ways to generate a raw transaction that can be signed and submitted to chain:
-
-1. Using the `generateTransaction()` method. This method accepts an `entry function payload` type and is available for entry function transaction submission. It uses the [TransactionBuilderRemoteABI](https://aptos-labs.github.io/ts-sdk-doc/classes/TransactionBuilderRemoteABI.html) to fetch the ABI from chain, serializes the payload arguments based on the entry function argument types and generates and returns a raw transaction that can be signed and submitted to chain.
-2. Using the `generateRawTransaction()` method. This method accepts any transaction payload type (entry, script, multisig) and expects the passed in arguments to be serialized. It then generates and returns a raw transaction that can be signed and submitted to chain.
-
-In addition, the Aptos SDK supports transaction status queries (success, failure, pending) by transaction hash.
-
-## AptosAccount class
-
-The [AptosAccount](https://aptos-labs.github.io/ts-sdk-doc/classes/AptosAccount.html) class has a constructor that creates a new account instance or retrieves an existing account instance. Additionally, this class provides the methods for:
-
-- Generating [Ed25519](https://ed25519.cr.yp.to/) key pairs. The Ed25519 public keys can be used to derive the chain account addresses, while the private keys should be kept private for transaction signing.
-- Signing a bytes buffer with an Ed25519 public key.
-- Deriving initial account addresses from the public keys.
-- Retrieving a resource account address by source address and seeds.
-- Deriving account address, public key, and private key.
diff --git a/developer-docs-site/docs/sdks/ts-sdk/sdk-plugins-layer.md b/developer-docs-site/docs/sdks/ts-sdk/sdk-plugins-layer.md deleted file mode 100644 index 679c379260d05..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/sdk-plugins-layer.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: "Plugins Layer" -slug: "typescript-sdk-plugins-layer" ---- - -A plugin is a component that can be added to the TypeScript SDK to extend or enhance its functionality. Plugins are meant to be built to support popular applications on the Aptos network and can be used to add new features, ease the use of the application operations and to customize the user experience. - -## AptosToken class - -The [AptosToken](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/aptos_token.ts) class is compatible with the [Aptos Digital Asset Standard](../../standards/digital-asset.md) and provides methods for creating and querying NFT collections and tokens. -It covers write methods that support creating, transferring, mutating, and burning tokens on-chain. - -The main write methods supported by the AptosToken class are: - -- Create Collection -- Mint -- Mint Soul Bound -- Burn Token -- Freeze Token Transfer -- Unfreeze Token Transfer -- Set Token Description -- Set Token Name -- Set Token URI -- Add Token Property -- Remove Token Property -- Update Token Property -- Add Types Property -- Update Types Property -- Transfer Token Ownership - -## TokenClient class - -The [TokenClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/token_client.ts) class is compatible with the [token V1 standard](../../standards/aptos-token.md) and provides methods for creating and querying the NFT collections and tokens. -It covers (1) write methods that support creating, transferring, mutating, and burning tokens on-chain and (2) read methods performing deserialization and returning data in TypeScript objects. 
-
-The main write methods supported by the TokenClient class are:
-
-- Create Collection
-- Create Token
-- Offer Token
-- Claim Token
-- Directly Transfer Token
-- Transfer Token with Opt-in
-- Mutate Token Properties
-- Burn Token by Owner or Creator
-
-The main read methods deserializing on-chain data to TypeScript objects are:
-
-- Get CollectionData
-- Get TokenData
-- Get Token of an Account
-
-## FungibleAssetsClient class
-
-The [FungibleAssetsClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/fungible_asset_client.ts) class is compatible with the [fungible asset component](../../standards/fungible-asset.md) and provides methods to transfer fungible assets between accounts and to check an account balance.
-
-The main write methods are:
-
-- Transfer
-- Generate Transfer
-
-The main read methods are:
-
-- Get Primary Balance
-
-## CoinClient class
-
-The [CoinClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/coin_client.ts) class provides methods to interact with the coin module to transfer coins between accounts and to check an account balance. By default it transfers `0x1::aptos_coin::AptosCoin`, but you can specify a different coin type with the `coinType` argument.
-
-The main methods are:
-
-- Transfer
-- Check Balance
-
-## FaucetClient class
-
-The [FaucetClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/faucet_client.ts) class is a thin wrapper for the Aptos faucet that provides a way to fund Aptos accounts. The class provides a request method to request tokens for an Aptos account.
-
-The main methods are:
-
-- Fund Account
-
-## ANSClient class
-
-The [ANSClient](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/plugins/ans_client.ts) class provides methods for creating an ANS name on the Aptos network and querying ANS data.
-It covers (1) write methods that support creating a unique identity on the Aptos network by registering an Aptos name and (2) read methods that retrieve an account's ANS name using its address, as well as retrieving an account's address using its ANS name. - -The main write methods supported by the SDK are: - -- Mint an Aptos Name -- Init Reverse Lookup Registry - -The main read methods are: - -- Get Address By Name -- Get Primary Name By Address - -## Build a Plugin - -Developers can also create plugins to extend the functionality of the SDK and to provide users with a better experience. To do that, simply follow these steps: - -1. Create a new `.ts` file under the `src/plugins/` folder and name it `.ts` (e.g. `ans_client`). -2. Create a class with the same `pluginName` (e.g. `AnsClient`) and implement it. -3. Export that file from the `src/plugins/index.ts` file (e.g. `export * from "./ans_client";`). \ No newline at end of file diff --git a/developer-docs-site/docs/sdks/ts-sdk/sdk-tests.md b/developer-docs-site/docs/sdks/ts-sdk/sdk-tests.md deleted file mode 100644 index 486b22d581af9..0000000000000 --- a/developer-docs-site/docs/sdks/ts-sdk/sdk-tests.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -title: "Tests and Validation" -slug: "typescript-sdk-tests" ---- - -The TypeScript SDK uses two types of tests, `e2e` and `unit` tests, located under the `src/tests/` folder: - -- `e2e` tests – End-to-end tests are meant to test the end-to-end operations starting from the SDK methods to the interaction with the REST/Indexer API and a smart contract and up to the blockchain level. For example, to test if a transaction has been submitted, we start with building the transaction payload the SDK expects, post the submit request to the REST API, and fetch the transaction data to make sure it has been fully submitted to the blockchain. -- `unit` tests – Unit tests are meant to test the output of a function in the SDK with the provided input. 
For example, we can test whether an account address is valid. - -## Validation for the Transaction Builder and BCS - -The [BCS](https://docs.rs/bcs/latest/bcs/) is used to assemble and serialize the transaction payloads for signing and submission. - -Given that different programming languages have different primitive type constraints (e.g., byte length, value range, etc.) and various composite types support (e.g., enum, struct, class, etc.), the code for data serialization is hard to validate. - -The Aptos SDK validates the Transaction Builder and BCS in two ways: - -1. The first level of validation is through unit tests and end-to-end (e2e) tests. - -:::tip - -An example of unit tests for the BCS serializer can be found in [`serializer.test.ts`](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/tests/unit/serializer.test.ts). - -An example of an e2e test for submitting a BCS transaction can be found in [`aptos_client.test.ts`](https://github.com/aptos-labs/aptos-core/blob/f4a7820a61f22ed8306219621402d96f70379d20/ecosystem/typescript/sdk/src/tests/e2e/aptos_client.test.ts#L78). - -::: - -2. The second level of validation is fuzzing tests with test vectors. The test vectors are produced by the same code used by the Aptos blockchain. The test vectors are arrays of JSON objects. Each JSON object contains randomized inputs and the expected outputs. The Aptos SDKs can parse and load test vectors to validate their implementations of Transaction Builder and BCS. - -There are two test vectors. Each covers one type of transaction payload: - -- [EntryFunction](https://github.com/aptos-labs/aptos-core/blob/main/api/goldens/aptos_api__tests__transaction_vector_test__test_entry_function_payload.json) vector -- [Script](https://github.com/aptos-labs/aptos-core/blob/main/api/goldens/aptos_api__tests__transaction_vector_test__test_script_payload.json) vector - -Vector items are self-explanatory. 
However, a special serialization method is used to save space and avoid data overflow. The details are described below:
-
-- All account addresses are hex-coded.
-- `args` in EntryFunction is hex-coded.
-- U64 and U128 numbers are serialized as string literals to avoid data truncation.
-- U8 is serialized as a number (not a string).
-- `code` in Script and ModuleBundle are hex-coded.
-
-:::tip
-See the [`transaction_vector.test.ts`](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/typescript/sdk/src/tests/unit/transaction_vector.test.ts) code example for how the TypeScript SDK does vector validation.
-:::
diff --git a/developer-docs-site/docs/sdks/ts-sdk/typescript-sdk-overview.md b/developer-docs-site/docs/sdks/ts-sdk/typescript-sdk-overview.md
deleted file mode 100644
index 85ec9a3e5ba6d..0000000000000
--- a/developer-docs-site/docs/sdks/ts-sdk/typescript-sdk-overview.md
+++ /dev/null
@@ -1,34 +0,0 @@
----
-title: "TypeScript SDK Architecture"
-slug: "typescript-sdk-overview"
----
-
-import ThemedImage from '@theme/ThemedImage';
-import useBaseUrl from '@docusaurus/useBaseUrl';
-
-This document describes the main features and components of the Aptos TypeScript SDK.
-
-The [Aptos TypeScript SDK](https://github.com/aptos-labs/aptos-core/tree/main/ecosystem/typescript/sdk) provides APIs and interfaces you can use to interact with the Aptos blockchain for reading the blockchain state and for sending your transaction to the Aptos blockchain.
-
-The Aptos TypeScript SDK has three logical layers:
-
-1. Client layer – Responsible for communication with the blockchain server.
-2. Core layer – Exposes the functionalities needed by most applications.
-3. Plugins layer – Implementation of different use cases such as Token, NFT, ANS, etc.
-
-See below a high-level architecture diagram of the Aptos TypeScript SDK.
- - - -## Components of the TypeScript SDK - -- [API Client Layer](./sdk-client-layer.md) -- [Core Layer](./sdk-core-layer.md) -- [Plugins Layer](./sdk-plugins-layer.md) -- [Tests and Validation](./sdk-tests.md) diff --git a/developer-docs-site/docs/sdks/unity-sdk.md b/developer-docs-site/docs/sdks/unity-sdk.md deleted file mode 100644 index 935e76cbd14e0..0000000000000 --- a/developer-docs-site/docs/sdks/unity-sdk.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -title: "Unity SDK" -slug: "unity-sdk" ---- - - -# Aptos Unity SDK - -The [Aptos Unity SDK](https://github.com/aptos-labs/Aptos-Unity-SDK) is a .NET implementation of the [Aptos SDK](./index.md), compatible with .NET Standard 2.0 and .NET 4.x for Unity. The goal of this SDK is to provide a set of tools for developers to build multi-platform applications (mobile, desktop, web, VR) using the Unity game engine and the Aptos blockchain infrastructure. - -See the post [Aptos Labs brings Web3 to Gaming with its new SDK for Unity developers](https://medium.com/aptoslabs/aptos-labs-brings-web3-to-gaming-with-its-new-sdk-for-unity-developers-e6544bdf9ba9) and the [Technical details](https://github.com/aptos-labs/Aptos-Unity-SDK#technical-details) section of the Unity SDK README for all of the features offered to game developers by the Aptos Unity SDK. - -## User flows - -The Aptos Unity SDK supports these use cases: - -- *Progressive onboarding flow* in which users can log into a game by email. In this flow, transactions are proxied, and Aptos uses a distributed key system. The users can then onboard to a full custodial wallet if desired. -- *In-game non-custodial wallet integration* in which game developers have the option to allow users to create full non-custodial wallets in the games. -- *Off-game non-custodial wallet integration* in which game developers may allow users to connect to a desktop wallet or a mobile wallet within the game or create burner wallets from the parent wallet seamlessly. 
- - -## Prerequisites - -### Supported Unity versions -| Supported Version: | Tested | -| -- | -- | -| 2021.3.x | ✅ | -| 2022.2.x | ✅ | - -| Windows | Mac | iOS | Android | WebGL | -| -- | -- | -- | -- | -- | -| ✅ | ✅ | ✅ | ✅ | ✅ | - -### Dependencies - -> As of Unity 2021.x.x, Newtonsoft Json is a common dependency. Prior versions of Unity require installing Newtonsoft. - -- [Chaos.NaCl.Standard](https://www.nuget.org/packages/Chaos.NaCl.Standard/) -- Microsoft.Extensions.Logging.Abstractions.1.0.0 — required by NBitcoin.7.0.22 -- Newtonsoft.Json -- NBitcoin.7.0.22 -- [Portable.BouncyCastle](https://www.nuget.org/packages/Portable.BouncyCastle) -- Zxing - -## Install the Unity SDK - -You may install the Unity SDK either through our `unitypackage` or the [Unity Package Manager](https://docs.unity3d.com/Manual/Packages.html). - -### Install by `unitypackage` - -1. Start Unity. -2. Download the latest `Aptos.Unity.unitypackage` file from the [Unity Asset Store](https://assetstore.unity.com/packages/decentralization/aptos-sdk-244713). -3. Click **Assets** → **Import Packages** → **Custom Package** and select the downloaded file. - -### Install by Unity Package Manager - -1. Open the [Unity Package Manager](https://docs.unity3d.com/Manual/upm-ui.html) window. -2. Click the add **+** button in the top status bar. -3. Select *Add package from git URL* from the dropdown menu. -4. Enter the URL *https://github.com/aptos-labs/Aptos-Unity-SDK.git* and click **Add**. 
\ No newline at end of file diff --git a/developer-docs-site/docs/standards/aptos-coin.md b/developer-docs-site/docs/standards/aptos-coin.md deleted file mode 100644 index fd3586ebfec58..0000000000000 --- a/developer-docs-site/docs/standards/aptos-coin.md +++ /dev/null @@ -1,252 +0,0 @@ ---- -title: "Aptos Coin (Legacy)" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Coin (Legacy) - -[Coin](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move) provides a standard, typesafe framework for simple, fungible tokens or coins. - -:::tip -Coin is stored in `0x1::coin`. -::: - -## Structures - -### Reusability - -A coin is defined in Move as: - -```rust -struct Coin has store { - /// Amount of coin this address has. - value: u64, -} -``` - -A Coin uses the `CoinType` to support re-usability of the Coin framework for distinct Coins. For example, `Coin` and `Coin` are two distinct coins. - -### Global store - -Coin also supports a resource for storing coins in global store: - -```rust -struct CoinStore has key { - coin: Coin, - frozen: bool, - deposit_events: EventHandle, - withdraw_events: EventHandle, -} -``` - -Coin information or metadata is stored in global store under the coin creators account: - -```rust -struct CoinInfo has key { - name: string::String, - /// Symbol of the coin, usually a shorter version of the name. - /// For example, Singapore Dollar is SGD. - symbol: string::String, - /// Number of decimals used to get its user representation. - /// For example, if `decimals` equals `2`, a balance of `505` coins should - /// be displayed to a user as `5.05` (`505 / 10 ** 2`). - decimals: u8, - /// Amount of this coin type in existence. - supply: Option, -} -``` - -## Primitives - -Coin provides the primitives for users creating and managing the coin and the users who use it. 
- -### Creators - -Coin creators and managers can: - -- Initialize a coin and set its metadata and supply monitoring. -- Minting and burning Coin value. -- Burning coins from a `CoinStore`. -- Freezing mobility into and out of a `CoinStore`. - -### Users - -Coin users can: - -- Merging two Coin structs of the same type. -- Extracting value from a Coin struct into a new Coin struct. -- Ability to deposit and withdraw from a `CoinStore` and emit events as a result. -- Allows for users to register a `CoinStore` in their account to handle coin. - -### Coin module key struct - -The following tables describe fields at the struct level. For the definitive list, see the [Aptos Framework](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/overview.md) containing [`coin`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/doc/coin.md). - -##### [`Coin`](https://github.com/aptos-labs/aptos-core/blob/744c2def47cddced878fda9bbd5633022fdb083a/aptos-move/framework/aptos-framework/sources/coin.move#L68) - -| Field | Type | Description | -| --- | --- | --- | -| `value` | u64 | Value of the token, eg: 1000000000 | - -##### [`CoinInfo`](https://github.com/aptos-labs/aptos-core/blob/744c2def47cddced878fda9bbd5633022fdb083a/aptos-move/framework/aptos-framework/sources/coin.move#L92) - -| Field | Type | Description | -| --- | --- | --- | -| `name` | String | Name of the token, eg: Aptos Coin | -| `symbol` | String | Symbol for the token, eg: APT | -| `decimals` | u8 | Determines how the value of coin is represented; for example APT’s decimal is 8, so a value of 100000000 is represented by 1 APT | -| `supply` | Option<OptionalAggregator> | option::some(optional_aggregator::new(MAX_U128, parallelizable)) | - -### Creating a new CoinType - -A coin creator can publish to an on-chain account a new module that defines a struct to represent a new `CoinType`. 
The coin creator will then call `coin:initialize` from that account to register this as a valid coin, and in return receive back structs that enable calling the functions to burn and mint coins and freeze `CoinStore`s. These will need to be stored in global storage by the creator to manage the use of the coin. - -```rust -public fun initialize( - account: &signer, - name: string::String, - symbol: string::String, - decimals: u8, - monitor_supply: bool, -): (BurnCapability, FreezeCapability, MintCapability) { -``` - -The creator has the opportunity to define a name, symbol, decimals, and whether or not the total supply for the coin is monitored. The following applies: - -- The first three of the above (`name`, `symbol`, `decimals`) are purely metadata and have no impact for on-chain applications. Some applications may use decimal to equate a single Coin from fractional coin. -- Monitoring supply (`monitor_supply`) helps track total coins in supply. However, due to the way the parallel executor works, turning on this option will prevent any parallel execution of mint and burn. If the coin will be regularly minted or burned, consider disabling `monitor_supply`. - -### Minting Coins - -If the creator or manager would like to mint coins, they must retrieve a reference to their `MintCapability`, which was produced in the `initialize`, and call: - -```rust -public fun mint( - amount: u64, - _cap: &MintCapability, -): Coin acquires CoinInfo { -``` - -This will produce a new Coin struct containing a value as dictated by the `amount`. If supply is tracked, then it will also be adjusted. 
- -### Burning Coins - -If the creator or manager would like to burn coins, they must retrieve a reference to their `BurnCapability`, which was produced in the `initialize`, and call: - -```rust -public fun burn( - coin: Coin, - _cap: &BurnCapability, -) acquires CoinInfo { -``` - -The creator or manager can also burn coins from a `CoinStore`: - -```rust -public fun burn_from( - account_addr: address, - amount: u64, - burn_cap: &BurnCapability, -) acquires CoinInfo, CoinStore { -``` - -:::tip burn vs burn_from -The function `burn` eliminates the total value stored in the coin, while `burn_from` only eliminates a given amount of value from a `CoinStore`. If supply is tracked, then it will also be adjusted. - -Burning coins from an account does not emit a `WithdrawEvent` as the `withdraw` function does. -::: - - - -### Freezing Accounts - -If the creator or manager would like to freeze a `CoinStore` on a specific account, they must retrieve a reference to their `FreezeCapability`, which was produced in `initialize`, and call: - -```rust -public entry fun freeze_coin_store( - account_addr: address, - _freeze_cap: &FreezeCapability, -) acquires CoinStore { -``` - -### Merging Coins - -Two coins of the same type can be merged into a single Coin struct that represents the accumulated value of the two coins independently by calling: - -```rust -public fun merge( - dst_coin: &mut Coin, - source_coin: Coin, -) { -``` - -### Extracting Coins - -A Coin can have value deducted to create another Coin by calling: - -```rust -public fun extract( - coin: &mut Coin, - amount: u64, -): Coin { -``` - -### Withdrawing Coins from CoinStore - -A holder of a `CoinStore` can extract a Coin of a specified value by calling: - -```rust -public fun withdraw( - account: &signer, - amount: u64, -): Coin acquires CoinStore { -``` - -:::tip -This function will emit a `WithdrawEvent`. 
-::: - -### Depositing Coins into CoinStore - -Any entity can deposit coins into an account’s `CoinStore` by calling: - -```rust -public fun deposit( - account_addr: address, - coin: Coin, -) acquires CoinStore { -``` -:::tip -This function will emit a `DepositEvent`. -::: - -### Transferring Coins - -A holder of a `CoinStore` can directly transfer coins from their account to another account’s `CoinStore` by calling: - -```rust -public entry fun transfer( - from: &signer, - to: address, - amount: u64, -) acquires CoinStore { -``` - -:::tip -This will emit both a `WithdrawEvent` and `DepositEvent` on the respective `CoinStore`s. -::: - -## Events - -```rust -struct DepositEvent has drop, store { - amount: u64, -} -``` - -```rust -struct WithdrawEvent has drop, store { - amount: u64, -} -``` diff --git a/developer-docs-site/docs/standards/aptos-object.md b/developer-docs-site/docs/standards/aptos-object.md deleted file mode 100644 index 21687041eec0d..0000000000000 --- a/developer-docs-site/docs/standards/aptos-object.md +++ /dev/null @@ -1,243 +0,0 @@ ---- -title: Object ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Object -The [Object model](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/object.move) allows Move to represent a complex type as a set of resources stored within a single address and offers a rich capability model that allows for fine-grained resource control and ownership management. - -In the object model, an NFT or token can place common token data within a Token resource, object data within an ObjectCore resource, and then specialize into additional resources as necessary. For example, a Player object could define a player within a game and be an NFT at the same time. The ObjectCore itself stores both the address of the current owner and the appropriate data for creating event streams. 
- -## Comparison with the account resources model -The existing Aptos data model emphasizes the use of the store ability within Move. Store allows for a struct to exist within any struct that is stored on-chain. As a result, data can live anywhere within any struct and at any address. While this provides great flexibility it has many limitations: - -1. Data is not be guaranteed to be accessible, for example, it can be placed within a user-defined resource that may violate expectations for that data, e.g., a creator attempting to burn an NFT put into a user-defined store. This can be confusing to both the users and creators of this data. -2. Data of differing types can be stored to a single data structure (e.g., map, vector) via `any`, but for complex data types `any` incurs additional costs within Move as each access requires deserialization. It also can lead to confusion if API developers expect that a specific any field changes the type it represents. -3. While resource accounts allow for greater autonomy of data, they do so inefficiently for objects and do not take advantage of resource groups. -4. Data cannot be recursively composable, because Move currently prohibits recursive data structures. Furthermore, experience suggests that true recursive data structures can lead to security vulnerabilities. -5. Existing data cannot be easily referenced from entry functions, for example, supporting string validation requires many lines of code. Attempting to make tables directly becomes impractical as keys can be composed of many types, thus specializing to support within entry functions becomes complex. -6. Events cannot be emitted from data but from an account that may not be associated with the data. -7. Transferring logic is limited to the APIs provided in the respective modules and generally requires loading resources on both the sender and receiver adding unnecessary cost overheads. 
- -:::tip -Object is a core primitive in Aptos Move and created via the object module at 0x1::object -::: - -## Structure -An object is stored in the ObjectGroup resource group, which enables other resources within the object to be co-located for data locality and data cost savings. It's important to note that not all resources within an object need to be co-located within the ObjectGroup, and it's up to the developer of an object to determine their data layout. - -### Object resource group - -Object is a container for resources that are stored within a single address. These resources usually represent related data often accessed together and should be stored within a single address for data locality and cost savings. -When created, an object has a resource group, ObjectGroup, by default: -```rust -#[resource_group(scope = global)] -struct ObjectGroup { } -``` - -Each object also has the core ObjectCore resource with fundamental properties: -```rust -#[resource_group_member(group = aptos_framework::object::ObjectGroup)] -struct ObjectCore has key { - /// Used by guid to guarantee globally unique objects and create event streams - guid_creation_num: u64, - /// The address (object or account) that owns this object - owner: address, - /// Object transferring is a common operation, this allows for disabling and enabling - /// transfers. Bypassing the use of a the TransferRef. - allow_ungated_transfer: bool, - /// Emitted events upon transferring of ownership. - transfer_events: event::EventHandle, -} -``` - -After creating an object, creators can extend with additional resources. For example, an exchange can create an object for each of its liquidity pools and add a resource to track the pool's liquidity. 
-```rust -#[resource_group_member(group = aptos_framework::object::ObjectGroup)] -struct LiquidityPool has key { - token_a: Object, - token_b: Object, - reserves_a: u128, - reserves_b: u128 -} -``` - -In the above code, `token_a` and `token_b` are references to other objects. Specifically, `Object` is a reference to an object stored at a given address that contains `T` resource. In this example, they're fungible assets (similar to coins). This is covered in more detail in the [Aptos Fungible Asset Standard](./fungible-asset.md). -LiquidityPool resource is part of the ObjectGroup resource group. This means that the LiquidityPool resource is stored in the same storage slot as the ObjectCore resource. This is more storage and gas efficient for reading and writing data. - -LiquidityPool resource can be added during construction of the object: -```rust -use aptos_framework::object::{Self, Object}; -use aptos_framework::fungible_asset::FungibleAsset; - -public fun create_liquidity_pool( - token_a: Object, - token_b: Object, - reserves_a: u128, - reserves_b: u128 -): Object { - let exchange_signer = &get_exchange_signer(); - let liquidity_pool_constructor_ref = &object::create_object_from_account(exchange_signer); - let liquidity_pool_signer = &object::generate_signer(liquidity_pool_constructor_ref); - move_to(liquidity_pool_signer, LiquidityPool { - token_a: token_a, - token_b: token_b, - reserves_a: reserves_a, - reserves_b: reserves_b - }); - object::object_from_constructor_ref(liquidity_pool_constructor_ref) -} -``` - -More resources can also be added post-creation if the exchange module stores the ExtendRef. This is covered in more detail in the Capabilities section. - -## Object Lifecycle -### Creation -Objects can be created via several different functions provided in the object module: -```rust -/// Create a new named object and return the ConstructorRef. Named objects can be queried globally -/// by knowing the user generated seed used to create them. 
Named objects cannot be deleted. -public fun create_named_object(creator: &signer, seed: vector): ConstructorRef; - -/// Create a new object from a GUID generated by an account. -public fun create_object_from_account(creator: &signer): ConstructorRef; - -/// Create a new object from a GUID generated by an object. -public fun create_object_from_object(creator: &signer): ConstructorRef; -``` - -These functions generate object addresses in different schemas: -1. `create_named_object` generates an address from the caller-provided seed and creator address. This is a deterministic address that can be queried globally. The formula used is sha3(creator address + seed + 0xFD). -2. `create_object_from_account` generates an address from the caller's address and a GUID generated by the caller's account. The formula used is sha3(creator address + account guid + 0xFD). -3. `create_object_from_object` generates an address from the caller's address and a GUID generated by the caller's object. The formula used is sha3(creator address + object guid + 0xFD). -The domain separation ensures there's no conflict among objects created via these different functions. - -Note that since named objects have deterministic addresses, they cannot be deleted. This is to prevent a malicious user from creating an object with the same seed as a named object and deleting it. - -### Object capabilities (refs) -The object creation functions all return a transient ConstructorRef that cannot be stored. ConstructorRef allows adding resources to an object (see example from the previous section). -ConstructorRef can also be used to generate the other capabilities (or "refs") that are used to manage the object: -```rust -/// Generates the DeleteRef, which can be used to remove Object from global storage. -public fun generate_delete_ref(ref: &ConstructorRef): DeleteRef; - -/// Generates the ExtendRef, which can be used to add new events and resources to the object. 
-public fun generate_extend_ref(ref: &ConstructorRef): ExtendRef; - -/// Generates the TransferRef, which can be used to manage object transfers. -public fun generate_transfer_ref(ref: &ConstructorRef): TransferRef; - -/// Create a signer for the ConstructorRef -public fun generate_signer(ref: &ConstructorRef): signer; -``` -These refs can be stored and used to manage the object. - -DeleteRef can be used to delete the object: -```rust -use aptos_framework::object::{Object, DeleteRef}; - -struct DeleteRefStore has key { - delete_ref: DeleteRef, -} - -public fun delete_liquidity_pool(liquidity_pool: Object) { - let liquidity_pool_address = object::object_address(liquidity_pool); - // Remove all resources added to the liquidity pool object. - let LiquidityPool { - token_a: _, - token_b: _, - reserves_a: _, - reserves_b: _ - } = move_from(liquidity_pool_address); - let DeleteRefStore { delete_ref } = move_from(liquidity_pool_address); - // Delete the object itself. - object::delete_object(delete_ref); -} -``` - -ExtendRef can be used to add resources to the object like the LiquidityPool resource in the previous section: -TransferRef can be used to disable owner-transfer when `ungated_transfer_allowed = true` or to forcefully transfer the object without the owner being involved: -```rust -use aptos_framework::object::{Object, TransferRef}; - -struct TransferRefStore has key { - transfer_ref: TransferRef, -} - -public fun disable_owner_transfer(liquidity_pool: Object) { - let liquidity_pool_address = object::object_address(liquidity_pool); - let transfer_ref = &borrow_global_mut(liquidity_pool_address).transfer_ref; - object::disable_ungated_transfer(transfer_ref); -} - -public fun creator_transfer(liquidity_pool: Object, new_owner: address) { - let liquidity_pool_address = object::object_address(liquidity_pool); - let transfer_ref = &borrow_global_mut(liquidity_pool_address).transfer_ref; - object::transfer_with_ref(object::generate_linear_transfer_ref(transfer_ref), 
new_owner); -} -``` - -Once the resources have been created on an object, they can be modified by the creator modules without the refs/ Example: -```rust -public entry fun modify_reserves(liquidity_pool: Object) { - let liquidity_pool = &mut borrow_global_mut(liquidity_pool); - liquidity_pool.reserves_a = liquidity_pool.reserves_a + 1000; -} -``` - -### Object reference -A reference to the object can be generated any time and stored in a resource as part of an object or account: -```rust -/// Returns the address of within a ConstructorRef -public fun object_from_constructor_ref(ref: &ConstructorRef): Object; -``` -`Object` is a reference around the object address with the guarantee that `T` exists when the reference is created. For example, we can create an `Object` for a liquidity pool object. -Creating an object reference with a non-existent `T` will fail at runtime. -Note that after references are created and stored, they do not guarantee that the resource `T` or the entire object itself has not been deleted. - -### Events -Objects come with transfer_events by default, which are emitted when the object is transferred. Transfer events are stored in the ObjectCore resource. - -Additionally, similar to account resources, events can be added in an object' resources. The object module offers the following functions to create event handles for objects: -```rust -/// Create a guid for the object, typically used for events -public fun create_guid(object: &signer): guid::GUID; - -/// Generate a new event handle. -public fun new_event_handle(object: &signer): event::EventHandle; -``` - -These event handles can be stored in the custom resources added to the object. 
Example: -```rust -struct LiquidityPoolEventStore has key { - create_events: event::EventHandle, -} - -struct CreateLiquidtyPoolEvent { - token_a: address, - token_b: address, - reserves_a: u128, - reserves_b: u128, -} - -public entry fun create_liquidity_pool_with_events() { - let exchange_signer = &get_exchange_signer(); - let liquidity_pool_constructor_ref = &object::create_object_from_account(exchange_signer); - let liquidity_pool_signer = &object::generate_signer(liquidity_pool_constructor_ref); - let event_handle = object::new_event_handle(liquidity_pool_signer); - event::emit(event_handle, CreateLiquidtyPoolEvent { - token_a: token_a, - token_b: token_b, - reserves_a: reserves_a, - reserves_b: reserves_b, - }); - let liquidity_pool = move_to(liquidity_pool_signer, LiquidityPool { - token_a: token_a, - token_b: token_b, - reserves_a: reserves_a, - reserves_b: reserves_b, - create_events: event_handle, - }); -} -``` diff --git a/developer-docs-site/docs/standards/aptos-token.md b/developer-docs-site/docs/standards/aptos-token.md deleted file mode 100644 index fc2ec3f9ee142..0000000000000 --- a/developer-docs-site/docs/standards/aptos-token.md +++ /dev/null @@ -1,247 +0,0 @@ ---- -title: "Aptos Token (Legacy)" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Token (Legacy) - -:::tip Aptos Token standards compared -Also see the [comparison of Aptos Token standards](../guides/nfts/aptos-token-overview.md). -::: - - -## Overview of NFT - -An [NFT](https://en.wikipedia.org/wiki/Non-fungible_token) is a non-fungible [token](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token.move) or data stored on a blockchain that uniquely defines ownership of an asset. NFTs were first defined in [EIP-721](https://eips.ethereum.org/EIPS/eip-721) and later expanded upon in [EIP-1155](https://eips.ethereum.org/EIPS/eip-1155). 
NFTs are typically defined using the following properties: - -- `name`: The name of the asset. It must be unique within a collection. -- `description`: The description of the asset. -- `uri`: A URL pointer to off-chain for more information about the asset. The asset could be media such as an image or video or more metadata. -- `supply`: The total number of units of this NFT. Many NFTs have only a single supply, while those that have more than one are referred to as editions. - -Additionally, most NFTs are part of a collection or a set of NFTs with a common attribute, for example, a theme, creator, or minimally contract. Each collection has a similar set of attributes: - -- `name`: The name of the collection. The name must be unique within the creator's account. -- `description`: The description of the collection. -- `uri`: A URL pointer to off-chain for more information about the asset. The asset could be media such as an image or video or more metadata. -- `supply`: The total number of NFTs in this collection. -- `maximum`: The maximum number of NFTs that this collection can have. If `maximum` is set to 0, then supply is untracked. - -## Design principles - -The [Aptos token standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token.move) is developed with the following principles: - -- **Interoperability**: Provide a standard implementation to improve interoperability across the ecosystem projects. Moreover, Move being a static language without dynamic dispatch makes this principle even more imperative. -- **Liquidity**: Achieve maximal liquidity by defining the NFT, fungible (non-decimal) and semi-fungible tokens in one contract. These different types of tokens can be easily stored, transferred and transacted in the same way. As a consequence, it becomes easier to achieve maximal interoperability across the marketplaces, exchanges, and other methods of exchange. 
-- **Rich on-chain token properties**: Enable the customization of on-chain token properties. Users can define their own properties and store them on-chain. This can potentially eliminate the need for the off-chain metadata. -- **Reduced overhead**: Reduce the cost of creating large amounts of NFTs from fungible tokens. This can lead to, for example, reduced overhead for similar tokens by the reuse of on-chain metadata for certain fungible tokens. - -:::tip Fungible token → NFT -The Aptos token standard supports [mutation of a fungible token to an NFT](#evolving-from-fungible-token-to-nft). -::: - -### Storing customized token properties on-chain - -The Aptos token standard uses the [`PropertyMap`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/property_map.move) module to store on-chain properties of tokens. `PropertyMap` maps a string key to a property value on-chain, which stores the value in Binary Canonical Serialization (BCS) format and its type. Currently, only primitive types (bool, u8, u64, u128, address and String) are supported in `PropertyMap`. Applications, such as [Aptos Names](https://www.aptosnames.com/), define application specific properties that are read and written by the applications smart contract. - -#### Default properties - -You can add your properties to [`default_properties`](https://github.com/aptos-labs/aptos-core/blob/e62fd09cb1c916d857fa655b3f174991ef8698b3/aptos-move/framework/aptos-token/sources/token.move#L98) in the TokenData. The properties defined here are shared by all tokens by default. - -The `default_properties` field is a key-value store with type information. It leverages the PropertyMap module which contain functions for serializing and deserializing different primitive types to the property value. - -#### Token properties - -You can also use the `token_properties` defined in the token itself for customization on-chain. 
You can create customized values for a property of this specific token, thereby allowing a token to have a different property value from its default. - -Note that limits exist to storing customized token properties on-chain, namely 1000 properties per token with field names limited to 128 characters. - -### Evolving from fungible token to NFT - -Fungible tokens share the same default property values. However, these property values can evolve over time and become different from each other. To support such evolution of token properties, the Aptos token standard provides the `property_version` field. Here is how it works: - -- During the token creation (minting), all tokens initially have `property_version` set to `0` and these tokens can be stacked together as fungible token. -- When the creators mutate the default properties of a token, the mutated token will be assigned a unique `property_version` to create a new [`token_id`](https://github.com/aptos-labs/aptos-core/blob/bba1690d7268759bd86ccd7459d7967172f1da24/aptos-move/framework/aptos-token/sources/token.move#L288) to differentiate it from other fungible tokens. This unique `token_id` allows the token to have its own property values, and all further mutation of this token does **not** change the `property_version` again. This token essentially becomes an NFT now. - -#### Configuring mutability - -To make mutability explicit for both the creator and owner, the Aptos token standard provides [`mutability_config`](https://github.com/aptos-labs/aptos-core/blob/bba1690d7268759bd86ccd7459d7967172f1da24/aptos-move/framework/aptos-token/sources/token.move#L100) at both the collection level and the token level to control which fields are mutable. Configurable here means the creator can configure this field to be mutable or immutable during creation. - -### Storing metadata off-chain - -Follow the standard below to ensure your NFT can be correctly displayed by various wallets. 
- -You should store the metadata in a JSON file located in an off-chain storage solution such as [arweave](https://www.arweave.org/) and provide the URL to the JSON file in the `uri` field of the token or the collection. We recommend the developers follow the [ERC-1155 off-chain data](https://eips.ethereum.org/EIPS/eip-1155) schema to format their JSON files. -```json -{ - "image": "https://www.arweave.net/abcd5678?ext=png", - "animation_url": "https://www.arweave.net/efgh1234?ext=mp4", - "external_url": "https://petra.app/", - "attributes": [ - { - "trait_type": "web", - "value": "yes" - }, - { - "trait_type": "mobile", - "value": "yes" - }, - { - "trait_type": "extension", - "value": "yes" - } - ], - "properties": { - "files": [ - { - "uri": "https://www.arweave.net/abcd5678?ext=png", - "type": "image/png" - }, - { - "uri": "https://watch.videodelivery.net/9876jkl", - "type": "unknown", - "cdn": true - }, - { - "uri": "https://www.arweave.net/efgh1234?ext=mp4", - "type": "video/mp4" - } - ], - "category": "video", - } -} -``` -* `image`: URL to the image asset. You may use the `?ext={file_extension}` query to provide information on the file type. -* `animation_url`: URL to the multimedia attachment of the asset. You may use the same `file_extension` query to provide the file type. -* `external_url`: URL to an external website where the user can also view the image. -* `attributes` - Object array, where an object should contain `trait_type` and `value` fields. `value` can be a string or a number. -* `properties.files`: Object array, where an object should contain the URI and type of the file that is part of the asset. The type should match the file extension. The array should also include files specified in `image` and `animation_url` fields, as well as any other files associated with the asset. You may use the `?ext={file_extension}` query to provide information on the file type. 
-* `properties.category`: Has supported categories: - * `image` - PNG, GIF, JPG - * `video` - MP4, MOV - * `audio` - MP3, FLAC, WAV - * `vr` - 3D models; GLB, GLTF - * `html` - HTML pages; scripts and relative paths within the HTML page are also supported - -You can also host your files on CDN to provide faster loading time by using the `cdn` flag in the file object. -When the file exists, this should be the primary location to read the media file (`video`, `audio`, `vr`) by wallet. -If the file is no longer available, the wallet can fall back to use the `animation_url` to load the file. -```json -"properties": { - "files": [ - ... - { - "uri": "https://watch.videodelivery.net/52a52c4a261c88f19d267931426c9be6", - "type": "unknown", - "cdn": true - }, - ... - ] -} -``` - -## Token data model - -The [following diagram](/img/docs/aptos-token-standard-flow.svg) depicts the flow of token data through Aptos. - - - -## Token resources - -As shown in the diagram above, token-related data are stored at both the creator’s account and the owner’s account. - -### Struct-level resources - -The following tables describe fields at the struct level. For the definitive list, see the [Aptos Token Framework](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/overview.md). - -#### Resource stored at the creator’s address - -| Field | Description | -| --- | --- | -| [`Collections`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#resource-collections) | Maintains a table called `collection_data`, which maps the collection name to the `CollectionData`. It also stores all the `TokenData` that this creator creates. | -| [`CollectionData`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#struct-collectiondata) | Stores the collection metadata. The supply is the number of tokens created for the current collection. 
The maximum is the upper bound of tokens in this collection. | -| [`CollectionMutabilityConfig`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_CollectionMutabilityConfig) | Specifies which field is mutable. | -| [`TokenData`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_TokenData) | Acts as the main struct for holding the token metadata. Properties is a where users can add their own properties that are not defined in the token data. Users can mint more tokens based on the `TokenData`, and those tokens share the same `TokenData`. | -| [`TokenMutabilityConfig`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_TokenMutabilityConfig) | Controls which fields are mutable. | -| [`TokenDataId`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_TokenDataId) | An ID used for representing and querying `TokenData` on-chain. This ID mainly contains three fields including creator address, collection name and token name. | -| [`Royalty`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_Royalty) | Specifies the denominator and numerator for calculating the royalty fee. It also has the payee account address for depositing the royalty. | -| `PropertyValue` | Contains both value of a property and type of property. | - -#### Resource stored at the owner’s address - -| Field | Description | -| --- | --- | -| [`TokenStore`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_TokenStore) | The main struct for storing the token owned by this address. It maps `TokenId` to the actual token. 
| -| [`Token`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_Token) | The amount is the number of tokens. | -| [`TokenId`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_TokenId) | `TokenDataId` points to the metadata of this token. The `property_version` represents a token with mutated `PropertyMap` from `default_properties` in the `TokenData`. | - -For more detailed descriptions, see [Aptos Token Framework](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/overview.md). - -## Token lifecycle - -### Token creation - -Every Aptos token belongs to a collection. The developer first needs to create a collection through `create_collection_script` and then create the token belonging to the collection `create_token_script`. -To achieve parallel `TokenData` and `Token` creation, a developer can create unlimited collection and `TokenData` where the `maximum` of the collection and `TokenData` are set as 0. With this setting, the token contract won’t track the supply of types of token (`TokenData` count) and supply of token within each token type. As the result, the `TokenData` and token can be created in parallel. - -Aptos also enforces simple validation of the input size and prevents duplication: -* Token name - unique within each collection -* Collection name - unique within each account -* Token and collection name length - smaller than 128 characters -* URI length - smaller than 512 characters -* Property map - can hold at most 1000 properties, and each key should be smaller than 128 characters - -### Token mutation - -Our standard supports mutation with a principle that the mutable fields are specified during the token creation. This allows the token owner to be informed which fields are mutable when they get the token from the creator. 
-Our contract uses `CollectionMutabilityConfig` to check if a field is mutable. Our contract uses `TokenMutabilityConfig` to check if a `TokenData` field is mutable. - -For mutation of properties, we have both -* `default_properties` stored in `TokenData` shared by all tokens belonging to the `TokenData` -* `token_properties` stored in the token itself - -To mutate `default_properties`, developers can use `mutate_tokendata_property` to mutate the properties when `TokenMutabilityConfig` is set to `true`. - -> **CAUTION: Set the `TokenMutabilityConfig` field to `false` unless it is absolutely necessary. Allowing `default_properties` to be mutable provides creators too much power; creators can change the burnable config to provide themselves the authority to burn tokens after token creation.** - -To mutate `token_properties` stored in the token, our standard uses the `TOKEN_PROPERTY_MUTABLE` property stored in `default_properties`. When the creator creates the `TokenData` with the `TOKEN_PROPERTY_MUTABLE` property set to `true`, the creator can mutate `token_properties`. Note that if the `mutate_tokendata_property` is set to `true`, creators can mutate the `token_properties` anyway since they can overwrite the setting in `default_properties`. - -### Token burn - -We provide `burn` and `burn_by_creator` functions for token owners and token creators to burn (or destroy) tokens. However, these two functions are also guarded by configs that are specified during the token creation so that both creator and owner are clear on who can burn the token. -Burn is allowed only when the `BURNABLE_BY_OWNER` property is set to `true` in `default_properties`. Burn by creator is allowed when `BURNABLE_BY_CREATOR` is `true` in `default_properties`. -Once all the tokens belonging to a `TokenData` are burned, the `TokenData` will be removed from the creator’s account. 
Similarly, if all `TokenData` belonging to a collection are removed, the `CollectionData` will be removed from the creator’s account. - -### Token transfer - -We provide three different modes for transferring tokens between the sender and receiver. - -#### Two-step transfer - -To protect users from receiving undesired NFTs, they must be first offered NFTs, and then accept the offered NFTs. Then only the offered NFTs will be deposited in the users' token stores. This is the default token transfer behavior. For example: -1. If Alice wants to send Bob an NFT, she must first offer Bob this NFT. This NFT is still stored under Alice’s account. -2. Only when Bob claims the NFT, will the NFT be removed from Alice’s account and stored in Bob’s token store. - -:::tip Token transfer module -The token transfer is implemented in the [`token_transfers`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token_transfers.move) module. -::: - -#### Transfer with opt-in - -If a user wants to receive direct transfer of the NFT, skipping the initial steps of offer and claim, then the user can call [`opt_in_direct_transfer`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_opt_in_direct_transfer) to allow other people to directly transfer the NFTs into the user's token store. After opting into direct transfer, the user can call [`transfer`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#0x3_token_transfer) to transfer tokens directly. For example, Alice and anyone can directly send a token to Bob's token store once Bob opts in. - -:::tip Turning off direct transfer -The user can also turn off this direct transfer behavior by calling the same `opt_in_direct_transfer` function to reset to the default behavior. 
-::: - -#### Multi-agent transfer - -The sender and receiver can both sign a transfer transaction to directly transfer a token from the sender to receiver [`direct_transfer_script`](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/doc/token.md#function-direct_transfer_script). For example, once Alice and Bob both sign the transfer transaction, the token will be directly transferred from Alice's account to Bob. - diff --git a/developer-docs-site/docs/standards/digital-asset.md b/developer-docs-site/docs/standards/digital-asset.md deleted file mode 100644 index 306818a1dc499..0000000000000 --- a/developer-docs-site/docs/standards/digital-asset.md +++ /dev/null @@ -1,388 +0,0 @@ ---- -title: "Aptos Digital Asset Standard" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Digital Asset Standard - -## Overview of NFTs -An [NFT](https://en.wikipedia.org/wiki/Non-fungible_token) is a non-fungible [token](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/token.move) -or data stored on a blockchain that uniquely defines ownership of an asset. NFTs were first defined in -[EIP-721](https://eips.ethereum.org/EIPS/eip-721) and later expanded upon in [EIP-1155](https://eips.ethereum.org/EIPS/eip-1155). -NFTs are typically defined using the following properties: - -- `name`: The name of the asset. It must be unique within a collection. -- `description`: The description of the asset. -- `uri`: A URL pointer to off-chain for more information about the asset. The asset could be media such as an image or video or more metadata. -- `supply`: The total number of units of this NFT. Many NFTs have only a single supply, while those that have more than one are referred to as editions. - -Additionally, most NFTs are part of a collection or a set of NFTs with a common attribute, for example, a theme, creator, -or minimally contract. 
Each collection has a similar set of attributes: - -- `name`: The name of the collection. The name must be unique within the creator's account. -- `description`: The description of the collection. -- `uri`: A URL pointer to off-chain for more information about the asset. The asset could be media such as an image or video or more metadata. -- `supply`: The total number of NFTs in this collection. -- `maximum`: The maximum number of NFTs that this collection can have. If `maximum` is set to 0, then supply is untracked. - -## Design principles -The [Aptos Digital Asset Standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/token.move) was developed as an improvement on the Aptos Token standard, with the following ideas in mind: -* **Flexibility** - NFTs are flexible and can be customized to accommodate any creative designs. -* **Composability** - Multiple NFTs can be easily composed together, such that the final object is greater than the sum of its parts -* **Scalability** - Greater parallelism between tokens - -The base token only provides minimal functionalities and is meant to be built upon to add more functionalities. All of its -functions are non-entry and thus not callable directly from off chain. Creators need to write their own modules that use -these functionalities or use "no code" solutions also provided in the framework. One such solution is [aptos_token](#aptos-token) -which provides functionalities such as custom metadata (via PropertyMap) and soul bound. - -## Comparison to the legacy Aptos Token Standard - -Digital Asset uses Aptos [objects](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/object.move) -rather than account resources traditionally used in Move. This allows for storing data outside the account and adding -flexibility in this way. 
-* Tokens can be easily extended with custom data and functionalities without requiring any changes in the framework -* Transfers are simply a reference update -* Direct transfer is allowed without an opt in -* NFTs can own other NFTs adding easy composability -* Soul bound tokens can be easily supported - -## Collections and tokens as objects -In this Token standard, both collections and tokens will be separate [objects](./aptos-object.md). They have their own -distinct addresses and can be referenced both on and off chain by address. Each object can contain multiple resources -so collections and tokens are extensible by default, allowing the creator to add custom data and functionalities without -having to modify the framework. - -On chain, another struct can include a reference to the collection or token objects like below: -```rust -struct ReferenceExample has key { - my_collection: Object, - my_token: Object, -} -``` -where both `my_collection` and `my_token` are addresses (with `Object<>` wrapper). - -Off-chain, the address of the object can be passed along to replace object arguments in entry functions called by transaction creation. -as arguments. For example: -```rust -public entry fun my_function(my_collection: Object) { - // Do something with the collection -} -``` - -Collection and token addresses will also be used to query data such as fetching all resources via fullnode API or against -an indexing service. - -### Royalties -Following the object extensibility pattern, royalties are added to collections or tokens as a resource with associated -functionality provided by [the royalty module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/royalty.move) -Royalty can be updated as long as a `MutatorRef`, a storable struct that grants permissions, is generated at creation -time and stored. - -See [Aptos Token](#aptos-token) for examples on how Royalty's `MutatorRef` can be stored and used. 
-Royalty can also be set directly on a token if it has a different royalty config than the collection's. - -## Token lifecycle -All Digital Asset modules are deployed at the reserved framework address `0x4`. - -### Collection creation -Every token belongs to a collection. The developer first needs to create a collection with: -1. A fixed maximum supply. Current supply is tracked and cannot exceed the maximum set. -```rust -use aptos_token_objects::collection; - -public entry fun create_collection(creator: &signer) { - let max_supply = 1000; - collection::create_fixed_collection( - creator, - "My Collection Description", - max_supply, - "My Collection", - royalty, - "https://mycollection.com", - ); -} -``` -2. Unlimited supply. Current supply is still tracked but there's no maximum enforced. -```rust -public entry fun create_collection(creator: &signer) { - collection::create_unlimited_collection( - creator, - "My Collection Description", - "My Collection", - royalty, - "https://mycollection.com", - ); -} -``` -Note that both track the current supply. Maximum supply cannot be changed after the collection is created, and a -collection cannot be converted from unlimited to fixed supply or vice versa. - -A collection has the following attributes: -* Collection name - unique within each account. This means a single creator account cannot create more than one -collection with the same name. -* Description - modifiable with a `MutatorRef` and smaller than 2048 characters -* URI length - modifiable with a `MutatorRef` and smaller than 512 characters -* Royalty - specifies how many % of the sale price goes to the creator of the collection. This can be changed with a -`MutatorRef` generated by the Royalty module. - -A `MutatorRef`, a storable struct that grants permissions to mutate, can be generated only during creation of the collection. -If created, the holder of the `MutatorRef` can change the `description` and the `URI length` of the collection. 
-```rust -public entry fun create_collection(creator: &signer) { - let collection_constructor_ref = &collection::create_unlimited_collection( - creator, - "My Collection Description", - "My Collection", - royalty, - "https://mycollection.com", - ); - let mutator_ref = collection::get_mutator_ref(collection_constructor_ref); - // Store the mutator ref somewhere safe -} -``` - -### Collection customization -A collection can be customized by adding more data (as resources) or functionalities. For example, a collection can track -when it was created in order to limit when tokens can be minted. -```rust -struct MyCollectionMetadata has key { - creation_timestamp_secs: u64, -} - -public entry fun create_collection(creator: &signer) { - // Constructor ref is a non-storable struct returned when creating a new object. - // It can generate an object signer to add resources to the collection object. - let collection_constructor_ref = &collection::create_unlimited_collection( - creator, - "My Collection Description", - "My Collection", - royalty, - "https://mycollection.com", - ); - // Constructor ref can be exchanged for signer to add resources to the collection object. - let collection_signer = &object::generate_signer(collection_constructor_ref); - move_to(collection_signer, MyCollectionMetadata { creation_timestamp_secs: timestamp::now_seconds() } }) -} -``` - -### Token creation -Creators can mint tokens, which are separate objects from the collection. This allows for greater customization. -Tokens can be created in two ways: -1. Named tokens. These tokens have deterministic addresses that are sha256 hash of the creator address, collection name, -and token name, concatenated. This allows for predictable addresses and easier querying of tokens. 
However, -named tokens are not fully deletable and thus burning them will only delete the token data and not fully delete the underlying -object -```rust -use aptos_token_objects::token; - -public entry fun mint_token(creator: &signer) { - token::create_named_token( - creator, - "My Collection", - "My named Token description", - "My named token", - royalty, - "https://mycollection.com/my-named-token.jpeg", - ); -} -``` -2. (Unnamed) tokens based on the creator account's guid. These tokens have addresses that are generated based on the creator -account's incrementing guid. The addresses of unnamed tokens are not deterministic as the account's guid can change outside -minting. Thus, querying for unnamed tokens is more difficult and requires indexing. -```rust -use aptos_token_objects::token; - -public entry fun mint_token(creator: &signer) { - token::create( - creator, - "My Collection", - "My named Token description", - "My named token", - royalty, - "https://mycollection.com/my-named-token.jpeg", - ); -} -``` - -Creators should cautiously consider whether they should use `create_named_token` or `create` when building -their custom collection/token. In general `create` is recommended as it allows for clean deletion if the -tokens are burnt and generally, deterministic addresses for tokens are not always necessary thanks to indexing services. -One example that would prefer deterministic addresses and thus `create_named_token` is a collection of soul bound tokens -where each token's address is created from the holder's name. - -### Token properties -Tokens by default have the following properties: -* Token name - unique within each collection. A collection cannot have more than one token with the same name. -* Token description - modifiable with a `MutatorRef` and smaller than 2048 characters -* Token URI length - modifiable with a `MutatorRef` and smaller than 512 characters -* Royalty - It's less common to have royalty setting on the token instead of collection. 
But this allows a token to have -a different royalty setting than the collection's. - -A `MutatorRef` can be generated only during creation of the token. -```rust -public entry fun mint_token(creator: &signer) { - // Constructor ref is a non-storable struct returned when creating a new object. - // It can be exchanged for signer to add resources to the token object. - let token_constructor_ref = &token::create( - creator, - "My Collection", - "My named Token description", - "My named token", - royalty, - "https://mycollection.com/my-named-token.jpeg", - ); - let mutator_ref = token::generate_mutator_ref(token_constructor_ref); - // Store the mutator ref somewhere safe -} -``` - -### Token customization -More data can be added to the token as resources, similar to [for collections](#collection-customization). - -### Token burn -Tokens can be burned by the creator if they generated and stored a `BurnRef` during the creation of the token. -```rust -public entry fun mint_token(creator: &signer) { - let token_constructor_ref = &token::create( - creator, - "My Collection", - "My named Token description", - "My named token", - royalty, - "https://mycollection.com/my-named-token.jpeg", - ); - let burn_ref = token::generate_burn_ref(token_constructor_ref); - // Store the burn ref somewhere safe -} - -public entry fun burn_token(token: Object) { - // Remove all custom data from the token object. - let token_address = object::object_address(&token); - let CustomData { ... } = move_from(token_address); - - // Retrieve the burn ref from storage - let burn_ref = ...; - token::burn(burn_ref); -} -``` -Note that if any custom data was added to the token objects, the `burn_token` function needs to first remove those data. -token::burn only deletes the object if it was created as an unnamed token. Named token will have all token data removed, -but the object will stay, thus creating a "burned" defunct object. 
- -### Token transfer -Tokens can be simply transferred as objects to any user via `object::transfer` - -## Aptos Token -[Aptos Token](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/aptos_token.move) -is a "no code" solution that builds on top of the base Aptos Digital Asset Standard and provides a more complete solution that -allows creators to mint NFTs without writing any code. It provides the following main features: -* Soul bound tokens which are non-transferable by holders -* Custom defined properties stored in a [PropertyMap](#property-map), a simple map data structure of attribute name (string) -> values (bytes). -* [Freezing and unfreezing transfers of non-soul bound tokens](#creator-management) -* [Creator management functionalities - modify a collection or token's metadata](#creator-management) - -### Property Map -Similar to Token Standard v1, Aptos Token provides an extensible [PropertyMap](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/property_map.move) -that provides type safe, but generic properties for a given NFT. Creators can pass initial properties when minting a -token and can freely add or remove properties later. - -Tokens can be minted using the provided `aptos_token::mint`. This is an entry function and can be called via a transaction -directly. -```rust -public entry fun mint( - creator: &signer, - collection: String, - description: String, - name: String, - uri: String, - property_keys: vector, - property_types: vector, - property_values: vector>, - ) acquires AptosCollection, AptosToken -``` - -To mint a soul bound token, the creator can call `aptos_token::mint_soul_bound` instead. This will create a token that -the holder cannot transfer. 
-```rust -public entry fun mint_soul_bound( - creator: &signer, - collection: String, - description: String, - name: String, - uri: String, - property_keys: vector, - property_types: vector, - property_values: vector>, - soul_bound_to: address, -) acquires AptosCollection -``` - -### Creator management -By default, the creator can do the following: -* Mint and burn tokens, including soul bound tokens -* Disallow transferring a token (freeze) and allow transferring a token (unfreeze) -* Update the collection's description and uri -* Add/Remove metadata properties from a token's property map -* Update a collection's royalty setting -* Update a token's name, description and uri - -### Further customization -Aptos Token is provided as a "no code" convenient solution, but it's not extensible. This is evident as most functions -are entry functions and do not return any ref (constructor, mutator, etc.). The `aptos_token` module stores and manages -the refs obtained from creating the collection and token objects and do not expose raw access to them. - -If a creator wants more custom functionalities such as being able to forcefully transfer a soul bound token, they would -need to write their own custom module that builds on top of the base Aptos Digital Asset Standard. They can of course borrow inspiration -and code from the Aptos Token module. - -## Fungible Token -Similar to [EIP-1155](https://eips.ethereum.org/EIPS/eip-1155), the Aptos Digital Asset Standard also supports fungible tokens -(also known as semi-fungible tokens). An example of this would be armor tokens in a game. Each armor token represents a -type of armor and is a token in a collection with metadata (e.g. durability, defense, etc.) and can be minted and burned. -However, there are multiple instances of the same armor type. For example, a player can have 3 wooden armors, where wooden armor -is a token in the Armor collection. 
- -This can be easily built by creating an asset that is both a Digital Asset (DA) and a Fungible Asset (FA), resulting in a Digital and Fungible Asset (DFA). After the creator creates the Armor collection and the -Wooden Armor token, they can make the Wooden Armor token "fungible": - -```rust -use aptos_framework::primary_fungible_store; - -public entry fun create_armor_collection(creator: &signer) { - collection::create_unlimited_collection( - creator, - "Collection containing different types of armors. Each armor type is a separate token", - "Armor", - royalty, - "https://myarmor.com", - ); -} - -public entry fun create_armor_type(creator: &signer, armor_type: String) { - let new_armor_type_constructor_ref = &token::create( - creator, - "Armor", - "Armor description", - armor_type, - royalty, - "https://myarmor.com/my-named-token.jpeg", - ); - // Make this armor token fungible so there can multiple instances of it. - primary_fungible_store::create_primary_store_enabled_fungible_asset( - new_armor_type_constructor_ref, - maximum_number_of_armors, - armor_type, - "ARMOR", - 0, // Armor cannot be divided so decimals is 0, - "https://mycollection.com/armor-icon.jpeg", - "https://myarmor.com", - ); - - // Add properties such as durability, defence, etc. to this armor token -} -``` - -Now the creator can mint multiple instances of the same armor type and transfer them to players. The players can freely -transfer the armor tokens to each other the same way they would transfer a fungible asset. 
diff --git a/developer-docs-site/docs/standards/fungible-asset.md b/developer-docs-site/docs/standards/fungible-asset.md deleted file mode 100644 index e9dffba3a1fc4..0000000000000 --- a/developer-docs-site/docs/standards/fungible-asset.md +++ /dev/null @@ -1,448 +0,0 @@ ---- -title: "Aptos Fungible Asset Standard" -id: "fungible-asset" ---- -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; - -# Aptos Fungible Asset Standard - -The Aptos Fungible Asset Standard (also known as Fungible Asset or FA) is a core framework component within Aptos that enables the tokenization of various assets, including commodities, real estate, and financial instruments. This standard facilitates the creation of decentralized financial applications. - -The tokenization of securities and commodities provides fractional ownership, making these markets more accessible to a broader range of investors. -Fungible tokens can also represent real estate ownership, enabling fractional ownership and providing liquidity to a traditionally illiquid market. -In-game assets such as virtual currencies and characters can be tokenized, enabling players to own and trade their assets and creating new revenue streams for game developers and players. - -Besides the aforementioned features, Fungible Asset (FA) is a superset of cryptocurrency, as coin is just one type of Fungible Asset. The Fungible Asset framework could replace the coin module in Move. - -The [Fungible Asset module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/fungible_asset.move) provides a standard, type-safe framework for defining FAs within the Aptos Move ecosystem. - -The standard is built upon [Aptos object model](./aptos-object.md), so all the resources defined here are included in the object resource group and stored inside objects. 
-There are two types of objects related to FA: - -- `Object`: include information about the FA, such as name, symbol, and decimals. -- `Object`: store a specific amount of FA units. FAs are units that are interchangeable with others of the same metadata. They can be stored in objects that contain a FungibleStore resource. These store objects can be freely created, and FAs can be moved, split, and combined between them easily. - -The standard also supports minting new units and burning existing units with appropriate controls. - -The different objects involved - `Object` and `Object` objects, and their relationships to accounts are shown in the diagram below: - -
- -
- -## Difference with Aptos Coin - -FA is a broader category than just coins. While fungible coins are just one possible use case of FA, it can represent a wider range of fungible items, such as in-game assets like gems or rocks, event tickets, and partial ownership of real-world assets. FA provides the flexibility for customizable, detailed management and offers a new programming model based on objects. -For Aptos coin, a `Coin` uses a generic, or the `CoinType`, to support distinct typing within the Coin framework. For example, `Coin
` and `Coin` are two distinct coins, if `A != B`. In contrast, FA does not have a generic in struct definition but uses the metadata reference to distinguish the type, which will be further explained later. -Minimally, [Aptos coin](./aptos-coin.md) should be interchangeable with FA. The migration plan is under discussion. - -## Structures - -### Metadata Object - -Metadata objects with unique addresses define the type of the FAs. Even if `Metadata` structs of two `Object` are exactly the same, as long as their addresses are different, the FAs pointing to them would be different. In short, the address of the metadata object can be used as **unique identifier** of the FA type. - -```rust -#[resource_group_member(group = aptos_framework::object::ObjectGroup)] -struct Metadata has key { - supply: Option, - /// Name of the fungible metadata, i.e., "USDT". - name: String, - /// Symbol of the fungible metadata, usually a shorter version of the name. - /// For example, Singapore Dollar is SGD. - symbol: String, - /// Number of decimals used for display purposes. - /// For example, if `decimals` equals `2`, a balance of `505` coins should - /// be displayed to a user as `5.05` (`505 / 10 ** 2`). - decimals: u8, -} -``` - -### Fungible Asset and Fungible Store - -FA allows typing by allocating an object reference that points to the metadata. Hence, a set of units of FA is represented as an amount and a reference to the metadata, as shown: - -```rust -struct FungibleAsset { - metadata: Object, - amount: u64, -} -``` - -An FA is a struct representing the type and the amount of units held. As the struct does not have either key or store abilities, it can only be passed from one function to another but must be consumed by the end of a transaction. 
Specifically, it must be deposited back into a fungible store at the end of the transaction, which is defined as: - -```rust -#[resource_group_member(group = aptos_framework::object::ObjectGroup)] -struct FungibleStore has key { - /// The address of the base metadata object. - metadata: Object, - /// The balance of the fungible metadata. - balance: u64, - /// FAs transferring is a common operation, this allows for freezing/unfreezing accounts. - frozen: bool, -} -``` - -:::tip -FAs are always stored in the top-level `FungibleStore` resource. This makes it much easier to find, analyze, and control. -::: - -The only extra field added here is `frozen`. If it is `true`, this object is frozen, i.e., deposit and withdraw are both disabled without using `TransferRef` in the next section. - -### References - -_Reference_ (ref) is the means to implement granular permission control across different standards in Aptos. In different contexts, it may be called _capabilities_. The FA standard has three distinct refs for minting, transferring, and burning FA: `MintRef`, `TransferRef`, and `BurnRef`. Each ref contains a reference to the FA metadata: - -```rust -struct MintRef has drop, store { - metadata: Object -} - -struct TransferRef has drop, store { - metadata: Object -} - -struct BurnRef has drop, store { - metadata: Object -} -``` - -Ref owners can do the following operations depending on the refs they own: - -- `MintRef` offers the capability to mint new FA units. -- `TransferRef` offers the capability to mutate the value of `frozen` in any `FungibleStore` of the same metadata or transfer FA by ignoring `frozen`. -- `BurnRef` offers the capability to burn or delete FA units. - -The three refs collectively act as the building blocks of various permission control systems as they have `store` and so can be passed around and stored anywhere. 
Please refer to the source file for `mint()`, `mint_to()`, `burn()`, `burn_from()`, `withdraw_with_ref()`, `deposit_with_ref()`, and `transfer_with_ref()`: These functions are used to mint, burn, withdraw, deposit, and transfer FA using the MintRef, BurnRef, and TransferRef. - -Note, these are framework functions and must be combined with business logic to produce a usable system. Developers who want to use these functions should familiarize themselves with the concepts of [Aptos object model](./aptos-object.md) and understand how the reference system enables extensible designs within Aptos move. - -### Creators - -A Fungible Asset creator can add fungibility to any **undeletable** object at creation by taking `&ConstructorRef` with the required information to -make that object a metadata of the associated FA. Then FA of this metadata can be minted and used. It is noted here that -**undeletable** means the `can_delete` field of `&ConstructorRef` has to be `false`. - -```rust -public fun add_fungibility( - constructor_ref: &ConstructorRef, - maximum_supply: Option, - name: String, - symbol: String, - decimals: u8, - icon_uri: String, - project_uri: String, -): Object -``` - -The creator has the opportunity to define a name, symbol, decimals, icon uri, project uri, and whether the total supply for the FA has a maximum. The following applies: - -- The first three of the above (`name`, `symbol`, `decimals`, `icon_uri`, `project_uri`) are purely metadata and have no impact for onchain - applications. Some applications may use decimals to equate a single Coin from a fractional coin. -- Maximum supply (`maximum_supply`) helps check the total supply does not exceed a maximum value. However, due to the way the parallel executor - works, setting the maximum supply will prevent any parallel execution of mint and burn. - -### Users - -Users are FA holders, who can: - -- Merge two FAs of the same metadata object. -- Extract FA partially from another. 
-- Deposit to and withdraw from a `FungibleStore` and emit events as a result. - -### Primitives - -At creation, the creator has the option to generate refs from the same `&ConstructorRef` to manage FA. These will need -to be stored in global storage to be used later. - -#### Mint - -If the manager would like to mint FA, they must retrieve a reference to `MintRef` and call: - -```rust -public fun mint(ref: &MintRef, amount: u64): FungibleAsset -``` - -This will produce a new FA of the metadata in the ref, containing a value as dictated by the `amount`. The supply will also be adjusted. Also, there is a `mint_to` function that deposits to a `FungibleStore` -after minting as a helper. - -#### Burn - -The opposite operation of minting. Likewise, a reference to `BurnRef` is required and call: - -```rust -public fun burn(ref: &BurnRef, fa: FungibleAsset) -``` - -This will reduce the passed-in `fa` to ashes and adjust the supply. There is also a `burn_from` function that forcibly withdraws FA -from an account first and then burns the withdrawn FA as a helper. - -#### Transfer and Freeze/Unfreeze - -`TransferRef` has two functions: - -- Flip `frozen` in `FungibleStore` holding FA of the same metadata in the `TransferRef`. If - it is `true`, the store is "frozen" and nobody can deposit to or withdraw from this store without using the ref. -- Withdraw from or deposit to a store ignoring `frozen` field. - -To change `frozen`, call: - -```rust -public fun set_frozen_flag( - ref: &TransferRef, - store: Object, - frozen: bool, -) -``` - -:::tip -This function will emit a `FrozenEvent`. -::: - -To forcibly withdraw, call: - -```Rust -public fun withdraw_with_ref( - ref: &TransferRef, - store: Object, - amount: u64 -): FungibleAsset -``` - -:::tip -This function will emit a `WithdrawEvent`. 
-:::
-
-To forcibly deposit, call:
-
-```rust
-public fun deposit_with_ref(
-    ref: &TransferRef,
-    store: Object,
-    fa: FungibleAsset
-)
-```
-
-:::tip
-This function will emit a `DepositEvent`.
-:::
-
-There is a function named `transfer_with_ref` that combines `withdraw_with_ref` and `deposit_with_ref` as
-a helper.
-
-#### Merging Fungible Assets
-
-Two FAs of the same type can be merged into a single struct that represents the accumulated value of the two
-independently by calling:
-
-```rust
-public fun merge(dst_fungible_asset: &mut FungibleAsset, src_fungible_asset: FungibleAsset)
-```
-
-After merging, `dst_fungible_asset` will have all the amounts.
-
-#### Extracting Fungible Asset
-
-A Fungible Asset can have `amount` deducted to create another FA by calling:
-
-```rust
-public fun extract(fungible_asset: &mut FungibleAsset, amount: u64): FungibleAsset
-```
-
-:::tip
-This function may produce FA with 0 amount, which is not usable. It is supposed to be merged with other FA or destroyed
-through `destroy_zero()` in the module.
-:::
-
-#### Withdraw
-
-The owner of a `FungibleStore` object that is not frozen can extract FA with a specified amount, by calling:
-
-```rust
-public fun withdraw(owner: &signer, store: Object, amount: u64): FungibleAsset
-```
-
-:::tip
-This function will emit a `WithdrawEvent`.
-:::
-
-#### Deposit
-
-Any entity can deposit FA into a `FungibleStore` object that is not frozen, by calling:
-
-```rust
-public fun deposit(store: Object, fa: FungibleAsset)
-```
-
-:::tip
-This function will emit a `DepositEvent`.
-:::
-
-#### Transfer
-
-The owner of a `FungibleStore` can directly transfer FA from that store to another if neither is frozen by calling:
-
-```rust
-public entry fun transfer(sender: &signer, from: Object, to: Object, amount: u64)
-```
-
-:::tip
-This will emit both `WithdrawEvent` and `DepositEvent` on the respective `FungibleStore`s.
-::: - -## Events - -- `DepositEvent`: Emitted when FAs are deposited into a store. -- `WithdrawEvent`: Emitted when FAs are withdrawn from a store. -- `FrozenEvent`: Emitted when the frozen status of a fungible store is updated. - -```rust -struct DepositEvent has drop, store { - amount: u64, -} -``` - -```rust -struct WithdrawEvent has drop, store { - amount: u64, -} -``` - -```rust -struct FrozenEvent has drop, store { - frozen: bool, -} -``` - -# Primary and secondary `FungibleStore`s - -Each `FungibleStore` object has an owner. However, an owner may possess more than one store. When Alice sends FA to -Bob, how does she determine the correct destination? Additionally, what happens if Bob doesn't have a store yet? - -To address these questions, the standard has been expanded to define primary and secondary stores. - -- Each account owns only one undeletable primary store for each type of FA, the address of which is derived in a deterministic - manner from the account address and metadata object address. If primary store does not exist, it will be created if - FA is going to be deposited by calling functions defined in `primary_fungible_store.move` -- Secondary stores do not have deterministic addresses and are theoretically deletable. Users are able to create as many - secondary stores as they want using the provided functions but there is a caveat that addressing secondary stores - on-chain may need extra work. - -The vast majority of users will have primary store as their only store for a specific type of FAs. It is -expected that secondary stores would be useful in complicated defi or other asset management contracts that will be -introduced in other tutorials using FA. - -## How to enable Primary `FungibleStore`? 
- -To add primary store support, when creating a metadata object, instead of the aforementioned `add_fungibility()`, the creator -has to call: - -```rust -public fun create_primary_store_enabled_fungible_asset( - constructor_ref: &ConstructorRef, - maximum_supply: Option, - name: String, - symbol: String, - decimals: u8, - icon_uri: String, - project_uri: String, -) -``` - -The parameters are the same as those of `add_fungibility()`. - -## Primitives - -### Get Primary `FungibleStore` - -To get the primary store object of a metadata object belonging to an account, call: - -```rust -public fun primary_store(owner: address, metadata: Object): Object -``` - -:::tip -There are other utility functions. `primary_store_address` returns the deterministic address the primary store, -and `primary_store_exists` checks the existence, etc. -::: - -### Manually Create Primary `FungibleStore` - -If a primary store does not exist, any entity is able to create it by calling: - -```rust -public fun create_primary_store(owner_addr: address, metadata: Object): Object -``` - -### Check Balance and Frozen Status - -To check the balance of a primary store, call: - -```rust -public fun balance(account: address, metadata: Object): u64 -``` - -To check whether the given account's primary store is frozen, call: - -```rust -public fun is_frozen(account: address, metadata: Object): bool -``` - -### Withdraw - -An owner can withdraw FA from their primary store by calling: - -```rust -public fun withdraw(owner: &signer, metadata: Object, amount: u64): FungibleAsset -``` - -### Deposit - -An owner can deposit FA to their primary store by calling: - -```rust -public fun deposit(owner: address, fa: FungibleAsset) -``` - -### Transfer - -An owner can deposit FA from their primary store to that of another account by calling: - -```rust -public entry fun transfer(sender: &signer, metadata: Object, recipient: address, amount: u64) -``` - -## Secondary `FungibleStore` - -Secondary stores are not 
commonly used by normal users but prevalent among smart contracts to manage assets owned by
-contracts. For example, an asset pool may have to manage multiple fungible stores for one or more types of FA. Those
-stores do not necessarily have to have deterministic addresses and a user may have multiple stores for a given kind of
-FA. So primary fungible store is not a good fit for the needs where secondary store plays a vital role.
-
-The way to create a secondary store is to create an object first and get its `ConstructorRef`. Then call:
-
-```rust
-public fun create_store(
-    constructor_ref: &ConstructorRef,
-    metadata: Object,
-): Object
-```
-
-It will turn the newly created object into a `FungibleStore`. Sometimes an object can be reused as a store. For example,
-a metadata object can also be a store to hold some FA of its own type or a liquidity pool object can be a store of the
-issued liquidity pool's token/coin.
-
-## Ownership of `FungibleStore`
-
-It is crucial to set the correct owner of a `FungibleStore` object for managing the FA stored inside. By default, the owner
-of a newly created object is the creator whose `signer` is passed into the creation function. For `FungibleStore`
-objects managed by the smart contract itself, usually they shouldn't have an owner out of the control of this contract. For
-those cases, those objects could make themselves their own owners and keep their object `ExtendRef` at the proper place
-to create `signer` as needed by the contract logic.
diff --git a/developer-docs-site/docs/standards/index.md b/developer-docs-site/docs/standards/index.md
deleted file mode 100644
index fa6b1c9920b48..0000000000000
--- a/developer-docs-site/docs/standards/index.md
+++ /dev/null
@@ -1,56 +0,0 @@
----
-title: "Aptos Standards"
----
-
-# Aptos Standards
-
-Standards define a common interoperable interface for all developers to build upon. They consist of rules to ensure compatibility across applications and wallets on the Aptos blockchain.
See a [list of known coin resource addresses](https://github.com/hippospace/aptos-coin-list) in Aptos provided by -hippospace. - -## Move Standard - -### [Aptos Object](./aptos-object.md) - -The [Object model](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/object.move) allows Move to represent a complex type as a set of resources stored within a single address and offers a rich capability model that allows for fine-grained resource control and ownership management. - -## Asset Standards - -### [Digital Asset (DA)](./digital-asset.md) - -The new [Aptos Digital Asset Standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/token.move) allows: - -- Rich, flexible assets and collectibles. -- Easy enhancement of base functionality to provide richer custom functionalities. An example of this is the [aptos_token module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token-objects/sources/aptos_token.move) - -Digital Asset (DA) is recommended for any new collections or protocols that want to build NFT or semi-fungible tokens. - -### [Fungible Asset (FA)](./fungible-asset.md) - -The new [Aptos Fungible Asset Standard](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/fungible_asset.move) is a standard meant for simple, typesafe, and fungible assets based on object model intending to replace Aptos coin. -Fungible Asset (FA) offers more features and flexibilities to Aptos move developers on creating and managing fungible assets. - - -## Wallet Standard - -### [Aptos Wallet](./wallets.md) - -The Wallet standard ensures that all wallets use the same functionality for key features. This includes: - -- The same mnemonic so that wallets can be moved between providers. -- [Wallet adapter](../integration/wallet-adapter-concept.md) so that all applications can interact seamlessly with a common interface. 
- -## Legacy Standards - -### [Aptos Token](./aptos-token.md) - -The old existing [Token module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-token/sources/token.move), on the other hand: - -- Encapsulates rich, flexible assets and collectibles. These assets are discrete (non-decimal) and can be fungible, semi-fungible, or nonfungible. -- The token standard is in its own `AptosToken` package at the Address `0x3` to allow for rapid iteration based on feedback from the community. - -### [Aptos Coin](./aptos-coin.md) - -The [Coin module](https://github.com/aptos-labs/aptos-core/blob/main/aptos-move/framework/aptos-framework/sources/coin.move) is a lightweight standard meant for simple, typesafe, and fungible assets. The coin standard is separated out into its own Move module to ensure that: - -- Applications and users can create and use simple tokens, with high performance and low gas overhead. -- The Coin standard is part of the Aptos core framework so it can be used for currencies, including the gas currency. \ No newline at end of file diff --git a/developer-docs-site/docs/standards/multisig-managed-fungible-asset.md b/developer-docs-site/docs/standards/multisig-managed-fungible-asset.md deleted file mode 100644 index de8b078465b6d..0000000000000 --- a/developer-docs-site/docs/standards/multisig-managed-fungible-asset.md +++ /dev/null @@ -1,132 +0,0 @@ ---- -title: "Manage Fungible Assets with Aptos Multisig Account" -slug: "multisig-managed-fungible-assets" ---- - -# Manage Fungible Assets with Aptos Framework Multisig Account - -This tutorial introduces a practical use case that combines Aptos framework multisig account with fungible asset standard to enhance the security margin of the management of fungible assets. Make sure you have understood module publishing and Aptos framework multisig account before moving on to the tutorial. 
If not, it is highly recommended to try out the following tutorials first: - -* [Your First Move Module](../tutorials/first-move-module.md) - -## Step 1: Pick an SDK - -This tutorial was created for the [TypeScript SDK](../sdks/ts-sdk/index.md). - -Other developers are invited to add support for the [Python SDK](../sdks/python-sdk.md), [Rust SDK](../sdks/rust-sdk.md), and [Unity SDK](../sdks/unity-sdk.md)! - -## Step 2: Publish the module - -To create a fungible asset controlled by an Aptos framework multisig account with all the administrative operations (mint, transfer, burn, freeze/unfreeze), a well-designed smart contract based on fungible asset standard is a prerequisite. The Aptos team provides an example code in `aptos-core` repo. - -Clone the `aptos-core` repo: - -```bash -git clone git@github.com:aptos-labs/aptos-core.git ~/aptos-core -``` - -Navigate to the `managed_fungible_asset` directory and then publish this package onto your `default` account using CLI: - -```bash -cd ~/aptos-core/aptos-move/move-examples/fungible_asset/managed_fungible_asset -aptos move publish --named-addresses example_addr=default -``` - -Navigate to the `multisig_managed_coin` directory and then publish this package onto your `default` account using CLI too: - -```bash -cd ~/aptos-core/aptos-move/move-examples/fungible_asset/multisig_managed_coin -aptos move publish --named-addresses example_addr=default -``` - -For this tutorial, `multisig_managed_coin` need to call functions defined in `managed_fungible_asset` on the same address. So both modules have to be published. - -:::tip -Do not forget to fund the account with faucet before publishing modules. 
-:::
-
-## Step 3: Start the example
-
-```bash
-cd ~/aptos-core/ecosystem/typescript/sdk/examples/typescript
-```
-
-Run the `multisig_managed_coin` example:
-
-```bash
-MODULE_ADDR=${DEFAULT_ACCOUNT_ADDRESS} pnpm run multisig_managed_coin
-```
-
-:::tip
-This example uses the Aptos devnet, which has historically been reset each Thursday.
-Make sure devnet is live when you try running the example!
-If you are running local-testnet with faucet, you can run the following command instead:
-
-```bash
-APTOS_NODE_URL=http://0.0.0.0:8080 APTOS_FAUCET_URL=http://0.0.0.0:8081 MODULE_ADDR=${DEFAULT_ACCOUNT_ADDRESS} pnpm run multisig_managed_coin
-```
-
-:::
-
-The example script should execute successfully without any errors. Then you are able to see what it did by searching the `owner1` and `owner2` addresses printed to the console on Aptos explorer.
-
-Let's follow the script to understand what it does:
-
-### Generate single signer accounts
-
-First, we will generate three single signer accounts, owner1, owner2 and owner3 who will co-own an Aptos framework multisig account.
-
-```typescript title="Generate 3 single signers"
-:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_1
-```
-
-### Create an Aptos framework multisig account with a managed fungible asset
-
-Next, let owner1 call the `initialize()` function defined in `multisig_managed_coin.move`, which first creates an Aptos framework multisig account owned by owner1 and adds both owner2 and owner3 as owners. Also, it creates a fungible asset called "meme coin" with customized settings denoted in the argument list and makes the multisig account the admin of the fungible asset.
-Also, each proposal needs at least 2 approvals to execute.
- -```typescript title="Query the multisig account and then call the initialize function" -:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_2 -``` - -### Mint - -Then we mint 1000 and 2000 meme coin to owner2 and owner3, respectively. The proposed transaction is submitted by owner2 and gets an additional approval from owner3. - -```typescript title="Mint 1000 to owner2 and 2000 to owner3" -:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_3 -``` - -### Freeze - -After minting, the example shows how to freeze account owner1. The proposed transaction is again submitted by owner2 and approved by owner3 in addition. - -```typescript title="Freeze owner1" -:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_4 -``` - -:::tip -Unfreeze is similar that just replace the last argument of `set_primary_stores_frozen_status` function to `false`. -::: - -### Force transfer - -When owner1 is frozen, normal transfer cannot withdraw from or deposit to that account. But as the admin of "meme coin", the multisig account has the capability to do that. -Next, Owner2 proposed a transaction to force transfer 1000 meme coins from owner3 to owner1. This time, owner1 approves it. - -```typescript title="Force transfer 1000 meme coins from owner3 to owner1" -:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_5 -``` - -### Burn - -Finally, all the three owners have 1000 meme coins. Let's burn all the coins! Owner2 makes the proposal and owner1 approves it. - -```typescript title="Burn 1000 meme coins from all the three owners' accounts" -:!: static/sdks/typescript/examples/typescript/multisig_managed_coin.ts section_6 -``` - -## Conclusion - -This tutorial shows an e2e flow of using Aptos framework multisig account to administrate fungible asset. Similarly, you can create your own module and leverage our powerful SDK to create the administration schema that fits your needs. 
- diff --git a/developer-docs-site/docs/standards/wallets.md b/developer-docs-site/docs/standards/wallets.md deleted file mode 100644 index 776f651c73595..0000000000000 --- a/developer-docs-site/docs/standards/wallets.md +++ /dev/null @@ -1,225 +0,0 @@ ---- -title: "Aptos Wallet Standard" ---- -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Aptos Wallet Standard - -The wallet standard provides guidelines for interoperability between wallet types. This ensures dapp developers do not need to change -their applications to handle different wallets. This standard offers a single interface for all dapp developers, allowing easy additions of new wallets and more users to each application. This interoperability allows users to choose which wallet they want without worrying about whether apps support their use cases. - -In order to ensure interoperability across Aptos wallets, the following is required: -1. Mnemonics - a set of words that can be used to derive account private keys -2. dapp API - entry points into the wallet to support access to identity managed by the wallet -3. Key rotation - the feature handling both the relationship around mnemonics and the recovery of accounts in different wallets - -## Mnemonics phrases - -A mnemonic phrase is a multiple word phrase that can be used to generate account addresses. -We recommend one mnemonic per account in order to handle key rotation better. -However, some wallets may want to support one mnemonic to many accounts coming from other chains. To support both of these use cases, the Aptos wallet standard uses a [Bitcoin Improvement Proposal (BIP44)](https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki) to derive path for mnemonics to accounts. - -### Creating an Aptos account - -Aptos account creation can be supported across wallets in the following manner: - -1. Generate a mnemonic phrase, for example with BIP39. -2. Get the master seed from that mnemonic phrase. -3. 
Use the BIP44-derived path to retrieve an account address (e.g. `m/44'/637'/0'/0'/0'`)
-   - See the [Aptos TypeScript SDK's implementation for the derive path](https://github.com/aptos-labs/aptos-core/blob/1bc5fd1f5eeaebd2ef291ac741c0f5d6f75ddaef/ecosystem/typescript/sdk/src/aptos_account.ts#L49-L69)
-   - For example, Petra Wallet always uses the path `m/44'/637'/0'/0'/0'` since there is one mnemonic per one account.
-
-
-```typescript
-/**
- * Creates new account with bip44 path and mnemonics,
- * @param path. (e.g. m/44'/637'/0'/0'/0')
- * Detailed description: {@link https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki}
- * @param mnemonics.
- * @returns AptosAccount
- */
-  static fromDerivePath(path: string, mnemonics: string): AptosAccount {
-    if (!AptosAccount.isValidPath(path)) {
-      throw new Error("Invalid derivation path");
-    }
-
-    const normalizeMnemonics = mnemonics
-      .trim()
-      .split(/\s+/)
-      .map((part) => part.toLowerCase())
-      .join(" ");
-
-    const { key } = derivePath(path, bytesToHex(bip39.mnemonicToSeedSync(normalizeMnemonics)));
-
-    return new AptosAccount(new Uint8Array(key));
-  }
-```
-
-### Supporting one mnemonic per multiple account wallets
-
-This is not recommended because the one-mnemonic-to-many-accounts paradigm makes it harder to handle rotated keys (the mnemonic changes for one account but not others).
-However, many wallets from other ecosystems use this paradigm, and take these steps to generate accounts:
-
-1. Generate a mnemonic phrase, for example with BIP39.
-2. Get the master seed from that mnemonic phrase.
-3. Use the BIP44-derived path to retrieve private keys (e.g. `m/44'/637'/i'/0'/0'`) where `i` is the account index.
-   - See the [Aptos TypeScript SDK's implementation for the derive path](https://github.com/aptos-labs/aptos-core/blob/1bc5fd1f5eeaebd2ef291ac741c0f5d6f75ddaef/ecosystem/typescript/sdk/src/aptos_account.ts#L49-L69)
-4. Increase `i` until all of the accounts the user wants to import are found.
- - Note: The iteration should be limited, if an account doesn't exist during iteration, keep iterating for a constant `address_gap_limit` (10 for now) to see if there are any other accounts. If an account is found we will continue to iterate as normal. - -ie. -```typescript -const gapLimit = 10; -let currentGap = 0; - -for (let i = 0; currentGap < gapLimit; i += 1) { - const derivationPath = `m/44'/637'/${i}'/0'/0'`; - const account = fromDerivePath(derivationPath, mnemonic); - const response = account.getResources(); - if (response.status !== 404) { - wallet.addAccount(account); - currentGap = 0; - } else { - currentGap += 1; - } -} -``` - -## dapp API - -More important than account creation, is how wallets connect to dapps. Additionally, following these APIs will allow for the wallet developer to integrate with the [Aptos Wallet Adapter Standard](../integration/wallet-adapter-concept.md). The APIs are as follows: - -- `connect()`, `disconnect()` -- `account()` -- `network()` -- `signAndSubmitTransaction(transaction: EntryFunctionPayload)` -- `signMessage(payload: SignMessagePayload)` -- Event listening (`onAccountChanged(listener)`, `onNetworkChanged(listener)`) - -```typescript -// Common Args and Responses - -// For single-signer account, there is one publicKey and minKeysRequired is null. -// For multi-signer account, there are multiple publicKeys and minKeysRequired value. 
-type AccountInfo = {
-  address: string;
-  publicKey: string | string[];
-  minKeysRequired?: number; // for multi-signer account
-}
-
-type NetworkInfo = {
-  name: string;
-  chainId: string;
-  url: string;
-};
-
-// The important thing to return here is the transaction hash, the dApp can wait for it
-type [PendingTransaction](https://github.com/aptos-labs/aptos-core/blob/1bc5fd1f5eeaebd2ef291ac741c0f5d6f75ddaef/ecosystem/typescript/sdk/src/generated/models/PendingTransaction.ts)
-
-type [EntryFunctionPayload](https://github.com/aptos-labs/aptos-core/blob/1bc5fd1f5eeaebd2ef291ac741c0f5d6f75ddaef/ecosystem/typescript/sdk/src/generated/models/EntryFunctionPayload.ts)
-
-
-```
-
-### Connection APIs
-
-The connection APIs ensure that wallets don't accept requests until the user acknowledges that they want to see the requests. This keeps
-the user state clean and prevents the user from unknowingly having prompts.
-
-- `connect()` will prompt the user for a connection
-  - return `Promise`
-- `disconnect()` allows the user to stop giving access to a dapp and also helps the dapp with state management
-  - return `Promise`
-
-### State APIs
-#### Get Account
-**Connection required**
-
-Allows a dapp to query for the current connected account address and public key
-
-- `account()` no prompt to the user
-  - returns `Promise`
-
-#### Get Network
-**Connection required**
-
-Allows a dapp to query for the current connected network name, chain ID, and URL
-
-- `network()` no prompt to the user
-  - returns `Promise`
-
-### Signing APIs
-#### Sign and submit transaction
-**Connection required**
-
-Allows a dapp to send a simple JSON payload using the [TypeScript SDK](https://github.com/aptos-labs/aptos-core/blob/1bc5fd1f5eeaebd2ef291ac741c0f5d6f75ddaef/ecosystem/typescript/sdk/src/aptos_client.ts#L217-L221)
-for signing and submission to the current network. The user should be prompted for approval.
- -- `signAndSubmitTransaction(transaction: EntryFunctionPayload)` will prompt the user with the transaction they are signing - - returns `Promise` - -#### Sign message -**Connection required** - -Allows a dapp to sign a message with their private key. The most common use case is to verify identity, but there are a few other possible use -cases. The user should be prompted for approval. You may notice some wallets from other chains just provide an interface to sign arbitrary strings. This can be susceptible to man-in-the-middle attacks, signing string transactions, etc. - -Types: -```typescript -export interface SignMessagePayload { - address?: boolean; // Should we include the address of the account in the message - application?: boolean; // Should we include the domain of the dApp - chainId?: boolean; // Should we include the current chain id the wallet is connected to - message: string; // The message to be signed and displayed to the user - nonce: string; // A nonce the dApp should generate -} - -export interface SignMessageResponse { - address?: string; - application?: string; - chainId?: number; - fullMessage: string; // The message that was generated to sign - message: string; // The message passed in by the user - nonce: string, - prefix: string, // Should always be APTOS - signature: string | string[]; // The signed full message - bitmap?: Uint8Array; // a 4-byte (32 bits) bit-vector of length N -} -``` - -- `signMessage(payload: SignMessagePayload)` prompts the user with the `payload.message` to be signed - - returns `Promise` - -An example: -`signMessage({nonce: 1234034, message: "Welcome to dApp!", address: true, application: true, chainId: true })` - -This would generate the `fullMessage` to be signed and returned as the `signature`: -```yaml -APTOS -address: 0x000001 -chain_id: 7 -application: badsite.firebase.google.com -nonce: 1234034 -message: Welcome to dApp! -``` - -Aptos has support for both single-signer and multi-signer accounts. 
If the wallet is a single-signer account, there is exactly one signature and `bitmap` is null. If the wallet is a multi-signer account, there are multiple `signature` and `bitmap` values. The `bitmap` indicates which public keys have signed the message.
-
-### Event listening
-
-To be added in the future:
-- Event listening (`onAccountChanged(listener)`, `onNetworkChanged(listener)`)
-
-## Key rotation
-
-Key rotation is currently not implemented in any wallets. Mapping of rotated keys has been [implemented](https://github.com/aptos-labs/aptos-core/pull/2972), but SDK integration is in progress.
-
-Wallets that import a private key will have to do the following:
-1. Derive the authentication key.
-2. Look up the authentication key onchain in the Account origination table.
-   - If the account doesn't exist, it's a new account. The address to be used is the authentication key.
-   - If the account does exist, it's a rotated key account, and the address to be used will come from the table.
-
-## Appendix
-- **[Forum post with discussion](https://forum.aptoslabs.com/t/wallet-dapp-api-standards/11765/33)** about the dapp API
diff --git a/developer-docs-site/docs/tools/aptos-cli/index.md b/developer-docs-site/docs/tools/aptos-cli/index.md
deleted file mode 100644
index d048028c99f44..0000000000000
--- a/developer-docs-site/docs/tools/aptos-cli/index.md
+++ /dev/null
@@ -1,13 +0,0 @@
----
-title: "Aptos CLI"
-id: "index"
-hidden: false
----
-
-# Aptos CLI
-
-The `aptos` tool is a command line interface (CLI) for developing on the Aptos blockchain, debugging, and for node operations.
Please refer to the following documentation for more information: - -- ### [Install Aptos CLI](install-cli/index.md) - -- ### [Use Aptos CLI](use-cli/use-aptos-cli.md) diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/automated-install.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/automated-install.md deleted file mode 100644 index 257592ad2b059..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/automated-install.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -title: "Install CLI by Script" ---- - -# Install CLI by script - -The `aptos` tool is a command line interface (CLI) for developing on the Aptos blockchain, debugging Move contracts, and conducting node operations. This document describes how to install the `aptos` CLI tool using the automated install script. - -## Prerequisites - -First, ensure you have Python 3.6+ installed: -``` -$ python3 --version -Python 3.9.13 -``` -If it is not installed, you can find installation instructions on [python.org](https://www.python.org/downloads/). - -## Install - -Follow these instructions to install the Aptos CLI on various operating systems. Regardless of the operating system, you will always be directed to the latest release of the Aptos CLI. - -
-macOS / Linux / Windows Subsystem for Linux (WSL) - -:::tip -These instructions have been tested on Ubuntu 20.04, Ubuntu 22.04, Arch Linux, MacOS (ARM), and WSL and assume you have either `curl` or `wget` installed to download the script. -::: - -In your terminal, run the following `curl` command: - -``` -curl -fsSL "https://aptos.dev/scripts/install_cli.py" | python3 -``` - -Or with `wget`: -``` -wget -qO- "https://aptos.dev/scripts/install_cli.py" | python3 -``` - -
- -
- -Windows (NT) - -:::tip -These instructions have been tested on Windows 11. -::: - -In Powershell: -``` -iwr "https://aptos.dev/scripts/install_cli.py" -useb | Select-Object -ExpandProperty Content | python3 -``` - -
- -## Update - -To trigger an update to the Aptos CLI, run `aptos update` and see output indicating success: -``` -{ - "Result": "CLI already up to date (v1.0.4)" -} -``` - -Alternatively, you may update your CLI by running the `python3 install_cli.py` installation script again and receiving output resembling: - -``` -Latest CLI release: 1.0.4 -Currently installed CLI: 1.0.4 - -The latest version (1.0.4) is already installed. -``` - - - diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/build-from-source.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/build-from-source.md deleted file mode 100644 index 9455ceb3ad5f2..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/build-from-source.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: "Build CLI from Source Code" ---- - -# Build Aptos CLI from Source Code - -If you are an advanced user and would like to build the CLI binary by downloading the source code, follow the below steps, [selecting the network branch](../../../guides/system-integrators-guide.md#choose-a-network) that meets your use case. Otherwise, [install the prebuilt CLI binaries](./download-cli-binaries.md) to ease ramp up and reduce variables in your environment. - -Begin by preparing your environment by following the instructions in [building Aptos from source](../../../guides/building-from-source.md), note, you can skip the last section on _Building Aptos_ as the instructions below build in release mode. - -
-Linux / macOS - -### Linux / macOS - -#### Building the Aptos CLI - -1. Build the CLI tool: `cargo build --package aptos --release` -1. The binary will be available in at `target/release/aptos` -1. (Optional) Move this executable to a place on your path. For example: `~/bin/aptos` -1. View help instructions by running `~/bin/aptos help` - -
- -
-Windows - -### Windows - -#### Building aptos-core - -1. Build the CLI tool: `cargo build --package aptos --release` -1. The binary will be available at `target\release\aptos.exe` -1. View help instructions by running `target\release\aptos.exe` - -
diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/download-cli-binaries.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/download-cli-binaries.md deleted file mode 100644 index 1f0bc2b092616..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/download-cli-binaries.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -title: "Download CLI Binaries" ---- - -# Download Aptos CLI Binaries - -The `aptos` tool is a command line interface (CLI) for developing on the Aptos blockchain, debugging Move contracts, and conducting node operations. This document describes how to install the `aptos` CLI tool using precompiled binaries that reduce variables in setting up your environment. Also see: - -- [Installing the Aptos CLI](./index.md) for alternatives to using the precompiled binaries. -- [Installing the Move Prover](./install-move-prover.md) for an optional tool to validate your Move code. -- [Using Aptos CLI](../use-cli/use-aptos-cli.md) for detailed instructions on employing the Aptos CLI. - -Binary releases are recommended for most users, otherwise see [Building Aptos From Source](../../../guides/building-from-source.md) - -
-macOS - -## macOS -We do not release binaries for macOS; you must use [brew](https://brew.sh/). - -
- -
-Linux - -## Linux - -:::tip -These instructions have been tested on Ubuntu 20.04. -::: - -1. Go to the [Aptos CLI release page](https://github.com/aptos-labs/aptos-core/releases?q=cli&expanded=true). -1. Click the **Assets** expandable menu for the latest release. -1. You will see the zip files with the filename of the format: `aptos-cli--`. These are the platform-specific pre-compiled binaries of the CLI. Download the zip file for your platform, dismissing any warnings. -1. Unzip the downloaded file. This will extract the `aptos` CLI binary file into your default downloads folder. -1. Move this extracted `aptos` binary file into your preferred local folder. - - :::tip - Upgrading? Remember to look in the default download folder - When you update the CLI binary with the latest version, note that the newer version binary will be downloaded to your default Downloads folder. Remember to move this newer version binary from the Downloads folder to `~/bin/aptos` folder (overwriting the older version). - ::: - -1. Make this `~/bin/aptos` an executable by running this command: - - `chmod +x ~/bin/aptos`. -1. Type `~/bin/aptos help` to read help instructions. -1. Add `~/bin` to your path in your `.bashrc` or `.zshrc` file for future use. - -
- -
-Windows 10, 11 and Windows Server 2022+ - -## Windows 10, 11 and Windows Server 2022+ - -:::tip -These instructions have been tested on Windows 11 and Windows Server 2022. Windows support is new and some features may be not complete. Open [GitHub issues](https://github.com/aptos-labs/aptos-core/issues) for bugs. -::: - -1. Go to the [Aptos CLI release page](https://github.com/aptos-labs/aptos-core/releases?q=cli&expanded=true). -1. Click the **Assets** expandable menu for the latest release. -1. You will see the zip files with the filename of the format: `aptos-cli--`. These are the platform-specific pre-compiled binaries of the CLI. Download the zip file for your platform, dismissing any warnings. -1. Unzip the downloaded file. This will extract the `aptos` CLI binary file into your default downloads folder. For example, on Windows it is the `\Users\user\Downloads` folder. -1. Move this extracted `aptos` binary file into your preferred local folder. - :::tip Upgrading? Remember to look in the default download folder - When you update the CLI binary with the latest version, note that the newer version binary will be downloaded to your default Downloads folder. Remember to move this newer version binary from the Downloads folder to your preferred location. - ::: -1. Open a powershell terminal via the windows start menu -1. In the powershell terminal, you can get help instructions by running the command with help. For example ` .\Downloads\aptos-cli-0.3.5-Windows-x86_64\aptos.exe help` to read help instructions. - -
diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/index.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/index.md deleted file mode 100644 index 052a55c22381b..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/index.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: "Install the Aptos CLI" ---- - -# Install the Aptos CLI - -You can install the Aptos CLI as a pre-compiled binary: - -- [Install the Aptos CLI with Homebrew](./install-from-brew.md) - Recommended for MacOS. -- [Install the Aptos CLI by script](./automated-install.md) - Recommended for Linux and Windows. -- [Download the prebuilt Aptos CLI binaries](./download-cli-binaries.md) - Generally not recommended, but useful if you prefer not to use an automated installation method. - -You can also [build the Aptos CLI from source code](./build-from-source.md). This allows you to build from any of the Aptos branches, including `devnet`, `testnet`, `mainnet`, and the latest code in `main`, the development branch. - -## (Optional) Installing the Move Prover - -Optionally, you can [install the Move Prover](./install-move-prover.md). diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/install-from-brew.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/install-from-brew.md deleted file mode 100644 index 1630815846c41..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/install-from-brew.md +++ /dev/null @@ -1,38 +0,0 @@ ---- -title: "Install the CLI with Brew" ---- - -# Install the Aptos CLI with Brew - -Recommended on macOS, `brew` is a package manager that allows for installing and updating packages in a single -command. - -:::tip Not supported on Windows -Brew is not supported fully on Windows -::: - -## Installation - -1. Ensure you have `brew` installed https://brew.sh/ -2. 
Open a terminal and enter the following commands -```bash - brew update # Gets the latest updates for packages - brew install aptos # Installs the Aptos CLI -``` -3. You can now get help instructions by running `aptos help`. You may have to open a new terminal window. -```bash - aptos help -``` - -## Upgrading the CLI - -Upgrading the CLI with brew is very simple, simply run - -```bash - brew update # Gets the latest updates for packages - brew upgrade aptos # Upgrades the Aptos CLI -``` - -## Additional details - -[Aptos CLI homebrew Readme](https://github.com/aptos-labs/aptos-core/blob/main/crates/aptos/homebrew/README.md) diff --git a/developer-docs-site/docs/tools/aptos-cli/install-cli/install-move-prover.md b/developer-docs-site/docs/tools/aptos-cli/install-cli/install-move-prover.md deleted file mode 100644 index df019c0ae35b6..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/install-cli/install-move-prover.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -title: "Install the Move Prover" ---- - -# Install the Move Prover - -If you want to use the [Move Prover](../../../move/prover/index.md), install the Move Prover dependencies after [installing the CLI binary](.). - -1. See [Building Aptos From Source](../../../guides/building-from-source.md) - -1. Then, in the checked out aptos-core directory, install additional Move tools: -
- Linux / macOS - - 1. Open a Terminal session. - 1. Run the dev setup script to prepare your environment: `./scripts/dev_setup.sh -yp` - 1. Update your current shell environment: `source ~/.profile` - - :::tip - `dev_setup.sh -p` updates your `~/.profile` with environment variables to support the installed Move Prover tools. You may need to set `.bash_profile` or `.zprofile` or other setup files for your shell. - ::: - -
-
- Windows - - 1. Open a PowerShell terminal as an administrator. - 1. Run the dev setup script to prepare your environment: `PowerShell -ExecutionPolicy Bypass -File ./scripts/windows_dev_setup.ps1 -y` - -
- -1. You can now run the Move Prover to prove an example: - ```bash - aptos move prove --package-dir aptos-move/move-examples/hello_prover/ - ``` diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-account.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-account.md deleted file mode 100644 index 8f6e1a55b9a71..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-account.md +++ /dev/null @@ -1,419 +0,0 @@ ---- -title: "Account" -id: "cli-account" ---- - -## Account examples - -### Fund an account with the faucet - -You can fund an account with the faucet via the CLI by using either an account address or with `default` (which defaults to the account address created with `aptos init`). - -For example, to fund the account `00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696` that was created above with the `aptos init` command: - -```bash -$ aptos account fund-with-faucet --account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696 -{ - "Result": "Added 10000 coins to account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696" -} -``` - -```bash -$ aptos account fund-with-faucet --account default -{ - "Result": "Added 10000 coins to account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696" -} -``` - -### View an account's balance and transfer events - -You can view the balance and transfer events (deposits and withdrawals) either by explicitly specifying the account address, as below: - -```bash -$ aptos account list --query balance --account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696 -``` - -or by specifying the `default` as below: - -```bash -$ aptos account list --query balance --account default -``` - -Both the above commands will generate the following information on your terminal: - -```bash -{ - "Result": [ - { - "coin": { - "value": "110000" - }, - "deposit_events": { - "counter": "3", - "guid": { - "id": { - "addr": 
"0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "2" - } - } - }, - "frozen": false, - "withdraw_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "3" - } - } - } - } - ] -} -``` - -### Listing resources in an account - -You can list the resources in an account from the command line. For example, see below for how to list the resources in the account you just created above: - -```bash -$ aptos account list --query resources --account default -``` - -or - -```bash -$ aptos account list --query resources --account 0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696 -``` - -Both the above commands will generate the following resource list information on your terminal: - -```bash -{ - "Result": [ - { - "0x1::coin::CoinStore<0x1::aptos_coin::AptosCoin>": { - "coin": { - "value": "110000" - }, - "deposit_events": { - "counter": "3", - "guid": { - "id": { - "addr": "0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "2" - } - } - }, - "frozen": false, - "withdraw_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "3" - } - } - } - } - }, - { - "0x1::account::Account": { - "authentication_key": "0x00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "coin_register_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "0" - } - } - }, - "guid_creation_num": "4", - "key_rotation_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xf1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696", - "creation_num": "1" - } - } - }, - "rotation_capability_offer": { - "for": { - "vec": [] - } - }, - "sequence_number": "0", - "signer_capability_offer": { - "for": { - "vec": [] - } - } - } - 
} - ] -} -``` - -### List the default profile - -You can also list the default profile from configuration with no account specified. - -:::tip -Account addresses may differ from example to example in this section. -::: - -```bash -$ aptos account list -{ - "Result": [ - { - "coin": { - "value": "10000" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "2" - } - } - } - }, - { - "register_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "0" - } - } - } - }, - { - "counter": "3" - }, - { - "authentication_key": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "self_address": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "sequence_number": "0" - } - ] -} -``` - -### Use the name of the profile - -Additionally, any place that takes an account can use the name of a profile: - -```bash -$ aptos account list --query resources --account superuser -{ - "Result": [ - { - "coin": { - "value": "10000" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "creation_num": "2" - } - } - } - }, - { - "register_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "creation_num": "0" - } - } - } - }, - { - "counter": "3" - }, - { - "authentication_key": 
"0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "self_address": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "sequence_number": "0" - } - ] -} -``` - -### Listing modules in an account - -You can pass different types of queries to view different items under an account. Currently, 'resources' and -'modules' are supported but more query types are coming. For example, to fetch modules: - -```bash -$ aptos account list --query modules -{ - "Result": [ - { - "bytecode": "0xa11ceb0b050000000b01000a020a12031c2504410405452d0772da0108cc0240068c030a0a9603150cab03650d90040400000101010201030104000506000006080004070700020e0401060100080001000009020300010f0404000410060100031107000002120709010602130a030106050806080105010802020c0a02000103040508020802070801010a0201060c010800010b0301090002070b030109000900074d657373616765056572726f72056576656e74067369676e657206737472696e67124d6573736167654368616e67654576656e740d4d657373616765486f6c64657206537472696e670b6765745f6d6573736167650b7365745f6d6573736167650c66726f6d5f6d6573736167650a746f5f6d657373616765076d657373616765156d6573736167655f6368616e67655f6576656e74730b4576656e7448616e646c65096e6f745f666f756e6404757466380a616464726573735f6f66106e65775f6576656e745f68616e646c650a656d69745f6576656e74b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb0000000000000000000000000000000000000000000000000000000000000001030800000000000000000002020a08020b08020102020c08020d0b030108000001000101030b0a002901030607001102270b002b0110001402010104010105240b0111030c040e0011040c020a02290120030b05120e000b040e00380012012d0105230b022a010c050a051000140c030a050f010b030a04120038010b040b050f0015020100010100", - "abi": { - "address": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "name": "Message", - "friends": [], - "exposed_functions": [ - { - "name": "get_message", - "visibility": "public", - "is_entry": false, - "generic_type_params": [], - "params": [ - "address" - ], - "return": [ - 
"0x1::string::String" - ] - }, - { - "name": "set_message", - "visibility": "public", - "is_entry": true, - "generic_type_params": [], - "params": [ - "signer", - "vector" - ], - "return": [] - } - ], - "structs": [ - { - "name": "MessageChangeEvent", - "is_native": false, - "abilities": [ - "drop", - "store" - ], - "generic_type_params": [], - "fields": [ - { - "name": "from_message", - "type": "0x1::string::String" - }, - { - "name": "to_message", - "type": "0x1::string::String" - } - ] - }, - { - "name": "MessageHolder", - "is_native": false, - "abilities": [ - "key" - ], - "generic_type_params": [], - "fields": [ - { - "name": "message", - "type": "0x1::string::String" - }, - { - "name": "message_change_events", - "type": "0x1::event::EventHandle<0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb::Message::MessageChangeEvent>" - } - ] - } - ] - } - } - ] -} -``` - -### Transferring coins - -The Aptos CLI is a simple wallet as well, and can transfer coins between accounts. 
- -```bash -$ aptos account transfer --account superuser --amount 100 -{ - "Result": { - "gas_used": 73, - "balance_changes": { - "742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc": { - "coin": { - "value": "10100" - }, - "deposit_events": { - "counter": "2", - "guid": { - "id": { - "addr": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "0", - "guid": { - "id": { - "addr": "0x742854f7dca56ea6309b51e8cebb830b12623f9c9d76c72c3242e4cad353dedc", - "creation_num": "2" - } - } - } - }, - "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb": { - "coin": { - "value": "9827" - }, - "deposit_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "1" - } - } - }, - "withdraw_events": { - "counter": "1", - "guid": { - "id": { - "addr": "0xb9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "creation_num": "2" - } - } - } - } - }, - "sender": "b9bd2cfa58ca29bce1d7add25fce5c62220604cd0236fe3f90d9de91ed9fb8cb", - "success": true, - "version": 1139, - "vm_status": "Executed successfully" - } -} -``` \ No newline at end of file diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-configuration.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-configuration.md deleted file mode 100644 index 2a245d3fdc1ba..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-configuration.md +++ /dev/null @@ -1,128 +0,0 @@ ---- -title: "Configuration" -id: "cli-configuration" ---- - - -## Configuration examples - -Configuration for the CLI works like this: - -### In the current working directory for local runs - -1. Your configurations are in a **local** YAML configuration file `.aptos/config.yaml`, i.e., located in the current working directory where you run the CLI. 
In this case you must run your CLI commands from this current working directory for this configuration to be used. -2. You can verify that the CLI is set to use this local configuration YAML file by running the command: - -```bash -aptos config show-global-config -``` - -You should see the below output: - -```bash -{ - "Result": { - "config_type": "Workspace" - } -} -``` - -The `Workspace` value for the `config_type` indicates that the `.aptos/config.yaml` file is used for the CLI configuration. - -### In the home directory for the global runs - -1. Your configurations are in a **global** YAML configuration file `~/.aptos/global_config.yaml`, i.e., located in your home directory. -2. Set the CLI to use this global configuration YAML file by running this command: - -```bash -aptos config set-global-config --config-type global -``` - -You will see the below output: - -``` -{ - "Result": { - "config_type": "Global" - } -} -``` - -You can also show the global configuration with the `show-global-config` command. - -```bash -$ aptos config show-global-config -{ - "Result": { - "config_type": "Global" - } -} -``` - -:::tip Default configuration -If you did not set any global configuration, then the `./.aptos/config.yaml` in the current working directory is used for configuration. -::: - -### Setting up shell completion - -You can set up shell completions with the `generate-shell-completions` command. You can lookup configuration for your specific shell. The supported shells are `[bash, zsh, fish, powershell, elvish]`. An example is below for [`oh my zsh`](https://ohmyz.sh/). - -```bash -aptos config generate-shell-completions --shell zsh --output-file ~/.oh-my-zsh/completions/_aptos -``` - -## Initialize local configuration and create an account - -A local folder named `.aptos/` will be created with a configuration `config.yaml` which can be used to store configuration between CLI runs. 
This is local to your run, so you will need to continue running CLI from this folder, or reinitialize in another folder. - -### Step 1: Run Aptos init - -The `aptos init` command will initialize the configuration with the private key you provided. -Note: If you would like to initialize a new profile from ledger, please refer to the [Ledger documentation](./use-aptos-ledger.md). - -```bash -$ aptos init -Configuring for profile default -Enter your rest endpoint [Current: None | No input: https://fullnode.devnet.aptoslabs.com] - -No rest url given, using https://fullnode.devnet.aptoslabs.com... -Enter your faucet endpoint [Current: None | No input: https://faucet.devnet.aptoslabs.com] - -No faucet url given, using https://faucet.devnet.aptoslabs.com... -Enter your private key as a hex literal (0x...) [Current: None | No input: Generate new key (or keep one if present)] - -No key given, generating key... -Account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696 doesn't exist, creating it and funding it with 10000 coins -Aptos is now set up for account 00f1f20ddd0b0dd2291b6e42c97274668c479bca70f07c6b6a80b99720779696! Run `aptos help` for more information about commands - -{ - "Result": "Success" -} -``` - -### Step 2: Changing the configuration - -To change the configuration, you can either run the command `aptos init` or you can manually edit the `.aptos/config.yaml` that is in your current working directory. - -### Creating other profiles - -You can also create other profiles for different endpoints and different keys. These can be made by adding the `--profile` argument, and can be used in most other commands to replace command line arguments. - -```bash -$ aptos init --profile superuser -Configuring for profile superuser -Enter your rest endpoint [Current: None | No input: https://fullnode.devnet.aptoslabs.com] - -No rest url given, using https://fullnode.devnet.aptoslabs.com... 
-Enter your faucet endpoint [Current: None | No input: https://faucet.devnet.aptoslabs.com] - -No faucet url given, using https://faucet.devnet.aptoslabs.com... -Enter your private key as a hex literal (0x...) [Current: None | No input: Generate new key (or keep one if present)] - -No key given, generating key... -Account 18B61497FD290B02BB0751F44381CADA1657C2B3AA6194A00D9BC9A85FAD3B04 doesn't exist, creating it and funding it with 10000 coins -Aptos is now set up for account 18B61497FD290B02BB0751F44381CADA1657C2B3AA6194A00D9BC9A85FAD3B04! Run `aptos help` for more information about commands -{ - "Result": "Success" -} -``` \ No newline at end of file diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-genesis.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-genesis.md deleted file mode 100644 index c920747b71683..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-genesis.md +++ /dev/null @@ -1,127 +0,0 @@ ---- -title: "Genesis" -id: "cli-genesis" ---- - -## Genesis ceremonies - -The `aptos` tool supports bootstrapping new blockchains through what is known as a genesis ceremony. The output of the genesis ceremony is the output of move instructions that prepares a blockchain for online operation. The input consists of: - -- A set of validators and their configuration -- The initial set of Move modules, known as a framework -- A unique `ChainId` (u8) that distinguishes this from other networks -- For test chains, there also exists an account that manages the minting of AptosCoin - -## Generating genesis - -- The genesis organizer constructs a `Layout` and distributes it. -- The genesis organizer prepares the Aptos framework's bytecode and distributes it. -- Each participant generates their `ValidatorConfiguration` and distributes it. -- Each participant generates a `genesis.blob` from the resulting contributions. -- The genesis organizer executes the `genesis.blob` to derive the initial waypoint and distributes it. 
-- Each participant begins their `aptos-node`. The `aptos-node` verifies upon startup that the `genesis.blob` matches the waypoint provided by the genesis organizer. -- The blockchain will begin consensus after a quorum of stake is available. - -### Prepare aptos-core - -The following sections rely on tools from the Aptos source. See [Building Aptos From Source](../../../guides/building-from-source.md) for setup. - -### The `layout` file - -The layout file contains: - -- `root_key`: an Ed25519 public key for AptosCoin management. -- `users`: the set of participants -- `chain_id`: the `ChainId` or a unique integer that distinguishes this deployment from other Aptos networks - -An example: - -``` -root_key: "0xca3579457555c80fc7bb39964eb298c414fd60f81a2f8eedb0244ec07a26e575" -users: - - alice - - bob -chain_id: 8 -``` - -### Building the Aptos Framework - -From your Aptos-core repository, build the framework and package it: - -``` -cargo run --package framework -mkdir aptos-framework-release -cp aptos-framework/releases/artifacts/current/build/**/bytecode_modules/* aptos-framework-release -``` - -The framework will be stored within the `aptos-framework-release` directory. - -### The `ValidatorConfiguration` file - -The `ValidatorConfiguration` file contains: - -- `account_address`: The account that manages this validator. This must be derived from the `account_key` provided within the `ValidatorConfiguration` file. -- `consensus_key`: The public key for authenticating consensus messages from the validator -- `account_key`: The public key for the account that manages this validator. This is used to derive the `account_address`. -- `network_key`: The public key for both validator and fullnode network authentication and encryption. -- `validator_host`: The network address where the validator resides. This contains a `host` and `port` field. The `host` should either be a DNS name or an IP address. Currently only IPv4 is supported. 
-- `full_node_host`: An optional network address where the fullnode resides. This contains a `host` and `port` field. The `host` should either be a DNS name or an IP address. Currently only IPv4 is supported. -- `stake_amount`: The number of coins being staked by this node. This is expected to be `1`, if it is different the configuration will be considered invalid. - -An example: - -``` -account_address: ccd49f3ea764365ac21e99f029ca63a9b0fbfab1c8d8d5482900e4fa32c5448a -consensus_key: "0xa05b8f41057ac72f9ca99f5e3b1b787930f03ba5e448661f2a1fac98371775ee" -account_key: "0x3d15ab64c8b14c9aab95287fd0eb894aad0b4bd929a5581bcc8225b5688f053b" -network_key: "0x43ce1a4ac031b98bb1ee4a5cd72a4cca0fd72933d64b22cef4f1a61895c2e544" -validator_host: - host: bobs_host - port: 6180 -full_node_host: - host: bobs_host - port: 6182 -stake_amount: 1 -``` - -To generate this using the `aptos` CLI: - -1. Generate your validator's keys: - -``` -cargo run --package aptos -- genesis generate-keys --output-dir bobs -``` - -2. Generate your `ValidatorConfiguration`: - -``` -cargo run --package aptos -- \\ - genesis set-validator-configuration \\ - --keys-dir bobs \\ - --username bob \\ - --validator-host bobs_host:6180 \\ - --full-node-host bobs_host:6180 \\ - --local-repository-dir . -``` - -3. The last command will produce a `bob.yaml` file that should be distributed to other participants for `genesis.blob` generation. - -### Generating a genesis and waypoint - -`genesis.blob` and the waypoint can be generated after obtaining the `layout` file, each of the individual `ValidatorConfiguration` files, and the framework release. It is important to validate that the `ValidatorConfiguration` provided in the earlier stage is the same as in the distribution for generating the `genesis.blob`. If there is a mismatch, inform all participants. - -To generate the `genesis.blob` and waypoint: - -- Place the `layout` file in a directory, e.g., `genesis`. 
-- Place all the `ValidatorConfiguration` files into the `genesis` directory. -- Ensure that the `ValidatorConfiguration` files are listed under the set of `users` within the `layout` file. -- Make a `framework` directory within the `genesis` directory and place the framework release `.mv` files into the `framework` directory. -- Use the `aptos` CLI to generate genesis and waypoint: - -``` -cargo run --package aptos -- genesis generate-genesis --local-repository-dir genesis -``` - -### Starting an `aptos-node` - -Upon generating the `genesis.blob` and waypoint, place them into your validator and fullnode's configuration directory and begin your validator and fullnode. diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-key.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-key.md deleted file mode 100644 index 907138dff0587..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-key.md +++ /dev/null @@ -1,107 +0,0 @@ ---- -title: "Key" -id: "cli-key" ---- - -## Key examples - -### Generating a key - -To allow generating private keys, you can use the `aptos key generate` command. You can generate -either `x25519` or `ed25519` keys. 
- -```bash -$ aptos key generate --key-type ed25519 --output-file output.key -{ - "Result": { - "PrivateKey Path": "output.key", - "PublicKey Path": "output.key.pub" - } -} -``` - -### Generating a vanity prefix key - -If you are generating an `ed25519` key, you can optionally supply a vanity prefix for the corresponding account address: - -```bash -$ aptos key generate --output-file starts_with_ace.key --vanity-prefix 0xace -{ - "Result": { - "PrivateKey Path": "starts_with_ace.key", - "PublicKey Path": "starts_with_ace.key.pub", - "Account Address:": "0xaceffa015e51dcd32c34794c143e19185b3f1be5464dd6184239a37e57e72ea3" - } -} -``` - -This works for multisig accounts too: - -```bash -% aptos key generate --output-file starts_with_bee.key --vanity-prefix 0xbee --vanity-multisig -{ - "Result": { - "PrivateKey Path": "starts_with_bee.key", - "PublicKey Path": "starts_with_bee.key.pub", - "Account Address:": "0x384cf987aab625f9727684d4dda8de668abedc18aa8dceabd7651a1cfb69196f", - "Multisig Account Address:": "0xbee0797c577428249125f6ed7f4a2a5939ddc34389294bd9f5d1627508832f56" - } -} -``` - -Note the vanity flag documentation from the `aptos key generate` help: - -``` ---vanity-multisig - Use this flag when vanity prefix is for a multisig account. This mines a private key for - a single signer account that can, as its first transaction, create a multisig account - with the given vanity prefix - ---vanity-prefix - Vanity prefix that resultant account address should start with, e.g. 0xaceface or d00d. 
- Each additional character multiplies by a factor of 16 the computational difficulty - associated with generating an address, so try out shorter prefixes first and be prepared - to wait for longer ones -``` - -:::tip -If you want even faster vanity address generation for long prefixes, try out the parallelism-optimized [`optivanity`](https://github.com/econia-labs/optivanity) tool from [Econia Labs](https://www.econialabs.com/) -::: - -### Generating a peer config - -To allow others to connect to your node, you need to generate a peer configuration. Below command shows how you can use -the `aptos` CLI to generate a peer configuration and write it into a file named `peer_config.yaml`. - -```bash -$ aptos key extract-peer --output-file peer_config.yaml -``` - -The above command will generate the following output on the terminal: - -```bash -{ - "Result": { - "8cfb85603080b13013b57e2e80887c695cfecd7ad8217d1cac22fa6f3b0b5752": { - "addresses": [], - "keys": [ - "0x8cfb85603080b13013b57e2e80887c695cfecd7ad8217d1cac22fa6f3b0b5752" - ], - "role": "Upstream" - } - } -} -``` - -The `peer_config.yaml` file will be created in your current working directory, with the contents as shown in the below example: - -```bash ---- -8cfb85603080b13013b57e2e80887c695cfecd7ad8217d1cac22fa6f3b0b5752: - addresses: [] - keys: - - "0x8cfb85603080b13013b57e2e80887c695cfecd7ad8217d1cac22fa6f3b0b5752" - role: Upstream -``` - -**Note:** In the addresses key, you should fill in your address. 
\ No newline at end of file diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-node.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-node.md deleted file mode 100644 index 416997bd7d1b5..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/cli-node.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -title: "Node" -id: "cli-node" ---- - -## Node command examples - -For Aptos CLI commands applicable to validator nodes, see the [Owner](../../../nodes/validator-node/operator/staking-pool-operations.md#owner-operations-with-cli) and [Voter](../../../nodes/validator-node/voter/index.md#steps-using-aptos-cli) instructions. - -### Running a local testnet - -To learn how to run a local testnet with the Aptos CLI see this guide: [Run a Local Development Network with the CLI](../../../guides/local-development-network.md). diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-cli.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-cli.md deleted file mode 100644 index e462167fad702..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-cli.md +++ /dev/null @@ -1,192 +0,0 @@ ---- -title: "Use the Aptos CLI" -id: "use-aptos-cli" ---- - -# Use the Aptos CLI - -The `aptos` tool is a command line interface (CLI) for developing on the Aptos blockchain, debugging, and for node operations. This document describes how to use the `aptos` CLI tool. To download or build the CLI, follow [Install Aptos CLI](../install-cli/index.md). - -For example on how to use specific commands, see the following documents: -- [Configuration and Initialization](./cli-configuration.md) -- [Account](./cli-account.md) -- [Key](./cli-key.md) -- [Node](./cli-node.md) -- [Move](../../../move/move-on-aptos/cli.md) -- [Genesis](./cli-genesis.md) - -## Command line help - -Command line help is available. Type `aptos help` or `aptos --help` to see the available command options. 
See below the usage output from `aptos --help`: - -```bash -USAGE: - aptos - -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - account Tool for interacting with accounts - config Tool for interacting with configuration of the Aptos CLI tool - genesis Tool for setting up an Aptos chain Genesis transaction - governance Tool for on-chain governance - help Print this message or the help of the given subcommand(s) - info Show build information about the CLI - init Tool to initialize current directory for the aptos tool - key Tool for generating, inspecting, and interacting with keys - move Tool for Move related operations - multisig Tool for interacting with multisig accounts - node Tool for operations related to nodes - stake Tool for manipulating stake and stake pools - update Update the CLI itself -``` - -### Command-specific help - -Command-specific help is also available. For example, see below the usage output from `aptos move --help`: - -```bash - -USAGE: - aptos move - -OPTIONS: - -h, --help Print help information - -V, --version Print version information - -SUBCOMMANDS: - build-publish-payload - Build a publication transaction payload and store it in a JSON output file - clean - Cleans derived artifacts of a package - compile - Compiles a package and returns the associated ModuleIds - compile-script - Compiles a Move script into bytecode - coverage - Computes coverage for a package - create-resource-account-and-publish-package - Publishes the modules in a Move package to the Aptos blockchain under a resource account - disassemble - Disassemble the Move bytecode pointed to - document - Documents a Move package - download - Downloads a package and stores it in a directory named after the package - help - Print this message or the help of the given subcommand(s) - init - Creates a new Move package at the given location - list - Lists information about packages and modules on-chain for an account - prove - 
Proves a Move package - publish - Publishes the modules in a Move package to the Aptos blockchain - run - Run a Move function - run-script - Run a Move script - test - Runs Move unit tests for a package - verify-package - Downloads a package and verifies the bytecode - view - Run a view function -``` - -### Sub-command help - -Help for sub-commands is also available. For example, see below the usage output from `aptos move compile --help`: - -```bash - -Usage: aptos move compile [OPTIONS] - -Options: - --save-metadata - Save the package metadata in the package's build directory - - If set, package metadata should be generated and stored in the package's build directory. This metadata can be used to construct a transaction to publish a package. - - --included-artifacts - Artifacts to be generated when building the package - - Which artifacts to include in the package. This can be one of `none`, `sparse`, and `all`. `none` is the most compact form and does not allow to reconstruct a source package from chain; `sparse` is the minimal set of artifacts needed to reconstruct a source package; `all` includes all available artifacts. The choice of included artifacts heavily influences the size and therefore gas cost of publishing: `none` is the size of bytecode alone; `sparse` is roughly 2 times as much; and `all` 3-4 as much. - - [default: sparse] - [possible values: none, sparse, all] - - --dev - Enables dev mode, which uses all dev-addresses and dev-dependencies - - Dev mode allows for changing dependencies and addresses to the preset [dev-addresses] and [dev-dependencies] fields. This works both inside and out of tests for using preset values. 
- - Currently, it also additionally pulls in all test compilation artifacts - - --package-dir - Path to a move package (the folder with a Move.toml file) - - --output-dir - Path to save the compiled move package - - Defaults to `/build` - - --named-addresses - Named addresses for the move binary - - Example: alice=0x1234, bob=0x5678 - - Note: This will fail if there are duplicates in the Move.toml file remove those first. - - [default: ] - - --skip-fetch-latest-git-deps - Skip pulling the latest git dependencies - - If you don't have a network connection, the compiler may fail due to no ability to pull git dependencies. This will allow overriding this for local development. - - --bytecode-version - Specify the version of the bytecode the compiler is going to emit - - --compiler-version - Specify the version of the compiler - - [possible values: v1, v2] - - --skip-attribute-checks - Do not complain about unknown attributes in Move code - - -h, --help - Print help (see a summary with '-h') - - -V, --version - Print version -``` - -## CLI information - -Run the `aptos info` command to get the CLI information for debugging purposes. 
See an example output of the `aptos info` command: - -```bash -{ - "Result": { - "build_branch": "", - "build_cargo_version": "cargo 1.71.2 (1a737af0c 2023-08-07)", - "build_clean_checkout": "true", - "build_commit_hash": "", - "build_is_release_build": "true", - "build_os": "macos-aarch64", - "build_pkg_version": "2.1.0", - "build_profile_name": "cli", - "build_rust_channel": "", - "build_rust_version": "rustc 1.71.1 (eb26296b5 2023-08-03) (built from a source tarball)", - "build_tag": "", - "build_time": "2023-08-24 21:13:40 +00:00", - "build_using_tokio_unstable": "true" - } -} -``` - diff --git a/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-ledger.md b/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-ledger.md deleted file mode 100644 index 32e76e68f5087..0000000000000 --- a/developer-docs-site/docs/tools/aptos-cli/use-cli/use-aptos-ledger.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -title: "Use Aptos CLI with Ledger" -id: "use-aptos-ledger" ---- - -# Use the Aptos CLI with Ledger - -The `aptos` tool can be used with your Ledger device to sign any transaction. This is the most secure way to sign transactions, as your private key never leaves your device. - -NOTE: It is highly recommended to use `Ledger Nano S Plus` or `Ledger Nano X` devices. The `Ledger Nano S` device has very limited memory and may not be able to sign most of the transactions. If you are trying to sign a transaction that is too big for your device to handle, you will get an error `Wrong raw transaction length`. - -## Blind Signing - -Before you begin, make sure you have `Blind Signing` enabled on your Ledger device. Otherwise you will not be able to sign transactions. -`Blind Signing` - confirming a smart contract interaction you can’t verify through a human readable language. - -## Create a new Ledger profile - -In order to interact with your Ledger device, you must first create a new profile. 
This profile will be used to store your Ledger public key, and will be used to sign transactions. - -```bash -$ aptos init --profile myprofile --ledger -Configuring for profile myprofile -Choose network from [devnet, testnet, mainnet, local, custom | defaults to devnet] - -No network given, using devnet... -Please choose an index from the following 5 ledger accounts, or choose an arbitrary index that you want to use: -[0] Derivation path: m/44'/637'/0'/0'/0' (Address: 59836ba1dd0c845713bdab34346688d6f1dba290dbf677929f2fc20593ba0cfb) -[1] Derivation path: m/44'/637'/1'/0'/0' (Address: 21563230cf6d69ee72a51d21920430d844ee48235e708edbafbc69708075a86e) -[2] Derivation path: m/44'/637'/2'/0'/0' (Address: 667446181b3b980ef29f5145a7a2cc34d433fc3ee8c97fc044fd978435f2cb8d) -[3] Derivation path: m/44'/637'/3'/0'/0' (Address: 2dcf037a9f31d93e202c074229a1b69ea8ee4d2f2d63323476001c65b0ec4f31) -[4] Derivation path: m/44'/637'/4'/0'/0' (Address: 23c579a9bdde1a59f1c9d36d8d379aeefe7a5997b5b58bd5a5b0c12a4f170431) -0 -Account 59836ba1dd0c845713bdab34346688d6f1dba290dbf677929f2fc20593ba0cfb has been already found onchain - ---- -Aptos CLI is now set up for account 59836ba1dd0c845713bdab34346688d6f1dba290dbf677929f2fc20593ba0cfb as profile myprofile! Run `aptos --help` for more information about commands -{ - "Result": "Success" -} -``` -In the above, we have created a new profile called `myprofile` and have chosen to use the first Ledger account (index 0) to sign transactions. If there is a certain index account you would like to use, you are welcome to use it. 
- - -After the above command, a new profile will be created in `~/.aptos/config.yml` and will look like the following: -```yaml - myprofile: - public_key: "0x05a8ace09d1136181029be3e817de3619562b0da2eedbff210e2b2f92c71be70" - account: 59836ba1dd0c845713bdab34346688d6f1dba290dbf677929f2fc20593ba0cfb - rest_url: "https://fullnode.devnet.aptoslabs.com" - faucet_url: "https://faucet.devnet.aptoslabs.com" - derivation_path: "m/44'/637'/0'/0'/0'" -``` -Notice that the above stores the derivation path instead of private key. This is because the private key is stored on your Ledger device, and is never exposed to the `aptos` tool. - -## Publish a package with Ledger -Once you have created a profile, you can use it to publish a package. The `aptos` tool will prompt you to confirm the transaction on your Ledger device. -Note: Make sure that you are on the same directory as where your move module is located: -```bash -$ aptos move publish --profile myprofile --named-addresses hello_blockchain=myprofile -Compiling, may take a little while to download git dependencies... -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING Examples -package size 1755 bytes -Do you want to submit a transaction for a range of [139600 - 209400] Octas at a gas unit price of 100 Octas? [yes/no] > -yes -{ - "Result": { - "transaction_hash": "0xd5a12594f85284cfd5518d547d084030b178ee926fa3d8cbf699cc0596eff538", - "gas_used": 1396, - "gas_unit_price": 100, - "sender": "59836ba1dd0c845713bdab34346688d6f1dba290dbf677929f2fc20593ba0cfb", - "sequence_number": 0, - "success": true, - "timestamp_us": 1689887104333038, - "version": 126445, - "vm_status": "Executed successfully" - } -} -``` - -After the above command, you will be prompted to confirm the transaction on your Ledger device. Once you confirm, the transaction will be submitted to the network. Note: Make sure you have `Blind Signing` enabled on your Ledger device. 
Otherwise you will not be able to sign transactions. -`Blind Signing` - confirming a smart contract interaction you can’t verify through a human readable language. - -## Common Errors - -### Error: Wrong raw transaction length -Your raw transaction or package size is too big. Currently the Aptos ledger app can only support up to 20kb transaction. If you are using a `Ledger Nano S`, the supported transaction size will be even smaller. -```bash -{ - "Error": "Unexpected error: Error - Wrong raw transaction length" -} -``` - -### Error: Ledger device is locked -Make sure your Ledger device is unlocked and you have Aptos app opened -```bash -{ - "Error": "Unexpected error: Error - Ledger device is locked" -} -``` \ No newline at end of file diff --git a/developer-docs-site/docs/tutorials/_category_.json b/developer-docs-site/docs/tutorials/_category_.json deleted file mode 100644 index 9fc1e56d3c6c5..0000000000000 --- a/developer-docs-site/docs/tutorials/_category_.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "label": "Tutorials", - "position": 3 -} diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/1-create-smart-contract.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/1-create-smart-contract.md deleted file mode 100644 index afea5119dc1b0..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/1-create-smart-contract.md +++ /dev/null @@ -1,604 +0,0 @@ ---- -title: "1. Create a Smart Contract" -id: "create-a-smart-contract" ---- - -# 1. Create a Smart Contract - -This is the first chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md). If you haven’t done it, review that introduction, and ensure your environment meets the [prerequisites](../../tools/aptos-cli/install-cli/index.md) listed there. - -Now that you are all set up and at your terminal: - -1. `cd` into the `my-first-dapp` root directory, and create a new `move` directory. -2. 
`cd` into the new `move` directory and run: `aptos move init --name my_todo_list` - That command creates a `sources/` directory and `Move.toml` file inside the `move` directory. -3. Your new `move` directory should now resemble: - - ![move-directory](../../../static/img/docs/build-e2e-dapp-img-1.png) - -### What is a `Move.toml` file? - -A `Move.toml` file is a manifest file that contains metadata such as name, version, and dependencies for the package. - -Take a look at the new `Move.toml` file. You should see your package information and an `AptosFramework` dependency. Note that the `name` property is the same `--name` attribute we passed to the `aptos move init` command before. The `AptosFramework` dependency points to the `aptos-core/aptos-move/framework/aptos-framework` GitHub repo main branch. - -### Why `sources` directory? - -The `sources` directory holds a collection of `.move` modules files. And later when we want to compile the package using the CLI, the compiler will look for that `sources` directory and its `Move.toml` file. - -### Create a Move module - -An account is needed to publish a Move module. So first we need to create an account. Once we have the account's private key, we can create a module under its account address and publish the module using that account. - -1. In our `move` directory, run `aptos init --network devnet`. Press enter when prompted. - - This creates for us a `.aptos` directory with a `config.yaml` file that holds our profile information. In the `config.yaml` file, we now have our profiles list that holds a `default` profile. 
If you open that file, you will see content resembling: - - ```yaml - profiles: - default: - private_key: "0xee8f387ef0b4bb0018c4b91d1c0f71776a9b85935b4c6ec2823d6c0022fbf5cb" - public_key: "0xc6c07218d79a806380ca67761905063ec7a78d41f79619f4562462a0f8b6be11" - account: cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018 - rest_url: "https://fullnode.devnet.aptoslabs.com" - faucet_url: "https://faucet.devnet.aptoslabs.com" - ``` - - From now on, whenever we run a CLI command in this `move` directory, it will run with that default profile. - We use the `devnet` network flag so eventually when we publish our package it will get published to the `devnet` network. - - :::tip - You just created a new account on the Aptos (dev) network! Yay! You can see it by going to the [Aptos Explorer](https://explorer.aptoslabs.com/?network=devnet) Devnet network view, pasting the `account` address value from your configuration file into the search field, and clicking on the dropdown option! - ::: - -As mentioned, our `sources` directory holds our `.move` module files; so let’s add our first Move file. - -2. Open the `Move.toml` file. -3. Add the following code to that Move file, substituting your actual default profile account address from `.aptos/config.yaml`: - -```toml -[addresses] -todolist_addr='' -``` - -If the default profile account address is `cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018`, your `Move.toml` file should look like: - -```toml -[addresses] -todolist_addr='cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018' -``` - -4. 
Create a new `todolist.move` file within the `sources` directory and add the following to that file: - -```rust -module todolist_addr::todolist { - -} -``` - -:::tip -A Move module is stored under an address (so when it published anyone can access it using that address); the syntax for a Move module is - -```rust -module :: { - -} -``` - -In our module, the `account-address` is `todolist_addr` (a variable we just declared on the `Move.toml` file in the previous step that holds an `address`), and the `module-name` is `todolist` (a random name we selected). -::: - -### Our contract logic - -Before jumping into writing code, let’s first understand what we want our smart contract program to do. For ease of understanding, we will keep the logic pretty simple: - -1. An account creates a new list. -2. An account creates a new task on their list. - - Whenever someone creates a new task, emit a `task_created` event. -3. Let an account mark their task as completed. - -:::tip -Creating an event is not mandatory yet useful if dapps/users want to monitor data, such as how many people create a new task, using the [Aptos Indexer](../../indexer/indexer-landing.md). -::: - -We can start with defining a `TodoList` struct, that holds the: - -- tasks array -- new task event -- a task counter that counts the number of created tasks (we can use that to differentiate between the tasks) - -And also create a `Task` struct that holds: - -- the task ID - derived from the TodoList task counter. -- address - the account address who created that task. -- content - the task content. -- completed - a boolean that marks whether that task is completed or not. - -On the `todolist.move` file, update the content in the module with: - -```rust -... -struct TodoList has key { - tasks: Table, - set_task_event: event::EventHandle, - task_counter: u64 - } - -struct Task has store, drop, copy { - task_id: u64, - address:address, - content: String, - completed: bool, - } -... 
-``` - -**What did we just add?** - -**TodoList** - -A struct that has the `key` and `store` abilities: - -- `Key` ability allows struct to be used as a storage identifier. In other words, `key` -  is an ability to be stored at the top-level and act as a storage. We need it here to have `TodoList` be a resource stored in our user account. - -When a struct has the `key` ability, it turns this struct into a `resource`: - -- `Resource` is stored under the account - therefore it *exists* only when assigned to an account and can be *accessed* through this account only. - -**Task** - -A struct that has the `store`, `drop` and `copy`abilities. - -• `Store` - Task needs `Store` as it’s stored inside another struct (TodoList) - -• `Copy` - value can be *copied* (or cloned by value). - -• `Drop` - value can be *dropped* by the end of scope. - -Let’s try to compile what we have now: - -1. `cd` into the `move` directory. -2. Run: `aptos move compile` - -**Seeing errors?!** Let’s understand them. - -We have some errors on `Unbound type`- this is happening because we used some types but never imported them, and the compiler doesn't know where to get them from. - -3. On the top of the module, import those types by adding: - -```rust -... -use aptos_framework::event; -use std::string::String; -use aptos_std::table::Table; -... -``` - -That will tell the compiler where it can get those types from. - -4. Run the `aptos move compile` command again; If all goes well, we should see a response resembling (where the resulting account address is your default profile account address): - -```rust -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING myTodolist -{ -"Result": [ - "cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018::todolist" - ] -} -``` - -At this point, we have successfully compiled our Move module. Yay! 
- -We also have a new `move/build` directory (created by the compiler) that holds our compiled modules, build information and `sources` directory. - -### Create list function - -The first thing an account can and should do with our contract is create a new list. - -Creating a list is essentially submitting a transaction, and so we need to know the `signer` who signed and submitted the transaction: - -1. Add a `create_list` function that accepts a `signer` - -```rust -public entry fun create_list(account: &signer){ - -} -``` - -**Let’s understand the components of this function** - -- `entry` - an _entry_ function is a function that can be called via transactions. Simply put, whenever you want to submit a transaction to the chain, you should call an entry function. - -- `&signer` - The **signer** argument is injected by the Move VM as the address who signed that transaction. - -Our code has a `TodoList` resource. Resource is stored under the account; therefore, it *exists* only when assigned to an account and can be *accessed* only through this account. - -That means to create the `TodoList` resource, we need to assign it to an account that only this account can have access to. - -The `create_list` function can handle that `TodoList` resource creation. - -2. Add the following to the `create_list` function - -```rust -public entry fun create_list(account: &signer){ - let tasks_holder = TodoList { - tasks: table::new(), - set_task_event: account::new_event_handle(account), - task_counter: 0 - }; - // move the TodoList resource under the signer account - move_to(account, tasks_holder); -} -``` - -This function takes in a `signer`, creates a new `TodoList` resource, and uses `move_to` to have the resource stored in the provided signer account. - -### Create task function - -As mentioned before, our contract has a create task function that lets an account create a new task. 
Creating a task is also essentially submitting a transaction, and so we need to know the `signer` who signed and submitted the transaction. Another element we want to accept in our function is the task `content`. - -1. Add a `create_task` function that accepts a `signer` and task `content` and the function logic. - -```rust -public entry fun create_task(account: &signer, content: String) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - // gets the TodoList resource - let todo_list = borrow_global_mut(signer_address); - // increment task counter - let counter = todo_list.task_counter + 1; - // creates a new Task - let new_task = Task { - task_id: counter, - address: signer_address, - content, - completed: false - }; - // adds the new task into the tasks table - table::upsert(&mut todo_list.tasks, counter, new_task); - // sets the task counter to be the incremented counter - todo_list.task_counter = counter; - // fires a new task created event - event::emit_event( - &mut borrow_global_mut(signer_address).set_task_event, - new_task, - ); - } -``` - -2. Since we now use two new modules - signer and table (you can see it being used in `signer::` and `table::`) - we need to import these modules. - At the top of the file, add those two use statements: - -```rust -use std::signer; -use aptos_std::table::{Self, Table}; // This one we already have, need to modify it -``` - -**Back to the code; what is happening here?** - -- First, we want to get the signer address so we can get this account’s `TodoList` resource. -- Then, we retrieve the `TodoList` resource with the `signer_address`; with that we have access to the `TodoList` properties. -- We can now increment the `task_counter` property, and create a new `Task` with the `signer_address`, `counter` and the provided `content`. 
-- We push it to the `todo_list.tasks` table that holds all of our tasks along with the new `counter` (which is the table key) and the newly created Task. -- Then we assign the global `task_counter` to be the new incremented counter. -- Finally, we emit the `task_created` event that holds the new Task data. `emit_event` is an `aptos-framework` function that accepts a reference to the event handle and a message. In our case, we are passing the function a reference (using the sign &) to the account’s `TodoListresource` `set_task_event` property as the first argument and a second message argument which is the new Task we just created. Remember, we have a `set_task_event` property in our `TodoList` struct. - -### Complete task function - -Another function we want our contract to hold is the option to mark a task as completed. - -1. Add a `complete_task` function that accepts a `signer` and a `task_id`: - -```rust -public entry fun complete_task(account: &signer, task_id: u64) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - // gets the TodoList resource - let todo_list = borrow_global_mut(signer_address); - // gets the task matches the task_id - let task_record = table::borrow_mut(&mut todo_list.tasks, task_id); - // update task as completed - task_record.completed = true; -} -``` - -**Let’s understand the code.** - -- As before in our create list function, we retrieve the `TodoList` struct by the signer address so we can have access to the tasks table that holds all of the account tasks. -- Then, we look for the task with the provided `task_id` on the `todo_list.tasks` table. -- Finally, we update that task completed property to be true. - -Now try to compile the code: - -2. Run: `aptos move compile` -3. Another `Unbound` error? To fix this, add a `use` statement to use the `account` module. - -```rust -use aptos_framework::account; -``` - -4. run `aptos move compile` again. 
- -### Add validations - -As this code now compiles, we want to have some validations and checks before creating a new task or updating the task as completed so we can be sure our functions work as expected. - -1. Add a check to the `create_task` function to make sure the signer account has a list: - -```rust -public entry fun create_task(account: &signer, content: String) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - - // assert signer has created a list - assert!(exists(signer_address), 1); - - ... -} -``` - -1. Add a check to the `complete_task` function to make sure the: - - signer has created a list. - - task exists. - - task is not completed. - -With: - -```rust -public entry fun complete_task(account: &signer, task_id: u64) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - // assert signer has created a list - assert!(exists(signer_address), 1); - // gets the TodoList resource - let todo_list = borrow_global_mut(signer_address); - // assert task exists - assert!(table::contains(&todo_list.tasks, task_id), 2); - // gets the task matched the task_id - let task_record = table::borrow_mut(&mut todo_list.tasks, task_id); - // assert task is not completed - assert!(task_record.completed == false, 3); - // update task as completed - task_record.completed = true; -} -``` - -We just added our first `assert` statements! - -If you noticed, `assert` accepts two arguments: the first is what to check for, and the second is an error code. Instead of passing in an arbitrary number, a convention is to declare `errors` on the top of the module file and use these instead. 
- -On the top of the module file (under the `use` statements), add those error declarations: - -```rust -// Errors -const E_NOT_INITIALIZED: u64 = 1; -const ETASK_DOESNT_EXIST: u64 = 2; -const ETASK_IS_COMPLETED: u64 = 3; -``` - -Now we can update our asserts with these constants: - -```rust -public entry fun create_task(account: &signer, content: String) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - - // assert signer has created a list - assert!(exists(signer_address), E_NOT_INITIALIZED); - - ... -} - - - -public entry fun complete_task(account: &signer, task_id: u64) acquires TodoList { - // gets the signer address - let signer_address = signer::address_of(account); - assert!(exists(signer_address), E_NOT_INITIALIZED); - // gets the TodoList resource - let todo_list = borrow_global_mut(signer_address); - // assert task exists - assert!(table::contains(&todo_list.tasks, task_id), ETASK_DOESNT_EXIST); - // gets the task matched the task_id - let task_record = table::borrow_mut(&mut todo_list.tasks, task_id); - // assert task is not completed - assert!(task_record.completed == false, ETASK_IS_COMPLETED); - // update task as completed - task_record.completed = true; -} -``` - -**WONDERFUL!!** - -Let’s stop for one moment and make sure our code compiles by running the `aptos move compile` command. If all goes well, we should output resembling: - -```rust -INCLUDING DEPENDENCY AptosFramework -INCLUDING DEPENDENCY AptosStdlib -INCLUDING DEPENDENCY MoveStdlib -BUILDING myTodolist -{ -"Result": [ - "cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018::todolist" - ] -} -``` - -If you encounter errors, make sure you followed the steps above correctly and try to determine the cause of the issues. - -### Write tests - -Now that we have our smart contract logic ready, we need to add some tests for it. - -Test functions use the `#[test]` annotation. - -1. 
Add the following code to the bottom of the file: - -```rust -#[test] -public entry fun test_flow() { - -} -``` - -:::tip -we need to use `entry` here because we are testing an `entry` function. -::: - -2. For simplicity, and because we don't have much code to test, we use one function to test the whole flow of the app. - The test steps are: - -``` - // create a list - // create a task - // update task as completed -``` - -Update the test function to be: - -```rust -#[test(admin = @0x123)] -public entry fun test_flow(admin: signer) acquires TodoList { - // creates an admin @todolist_addr account for test - account::create_account_for_test(signer::address_of(&admin)); - // initialize contract with admin account - create_list(&admin); - - // creates a task by the admin account - create_task(&admin, string::utf8(b"New Task")); - let task_count = event::counter(&borrow_global(signer::address_of(&admin)).set_task_event); - assert!(task_count == 1, 4); - let todo_list = borrow_global(signer::address_of(&admin)); - assert!(todo_list.task_counter == 1, 5); - let task_record = table::borrow(&todo_list.tasks, todo_list.task_counter); - assert!(task_record.task_id == 1, 6); - assert!(task_record.completed == false, 7); - assert!(task_record.content == string::utf8(b"New Task"), 8); - assert!(task_record.address == signer::address_of(&admin), 9); - - // updates task as completed - complete_task(&admin, 1); - let todo_list = borrow_global(signer::address_of(&admin)); - let task_record = table::borrow(&todo_list.tasks, 1); - assert!(task_record.task_id == 1, 10); - assert!(task_record.completed == true, 11); - assert!(task_record.content == string::utf8(b"New Task"), 12); - assert!(task_record.address == signer::address_of(&admin), 13); -} -``` - -Our `#[test]` annotation has changed and declares an account variable. - -Additionally, the function itself now accepts a signer argument. 
- -**Let’s understand our tests.** - -Since our tests run outside of an account scope, we need to _create_ accounts to use in our tests. The `#[test]` annotation gives us the option to declare those accounts. We use an `admin` account and set it to a random account address (`@0x123`). The function accepts this signer (account) and creates it by using a built-in function to create an account for test. - -Then we simply go through the flow by: - -- creating a list -- creating a task -- updating a task as completed - -And assert the expected data/behavior at each step. - -Before running the tests again, we need to import (`use`) some new modules we are now employing in our code: - -3. At the top of the file, add this `use` statement: - -```rust -use std::string::{Self, String}; // already have it, need to modify -``` - -4. Run the `aptos move test` command. If all goes right, we should see a success message like: - -```rust -Running Move unit tests -[ PASS ] 0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018::todolist::test_flow -Test result: OK. Total tests: 1; passed: 1; failed: 0 -{ - "Result": "Success" -} -``` - -5. Let’s add one more test to make sure our `complete_task` function works as expected. Add another test function with: - -```rust -#[test(admin = @0x123)] -#[expected_failure(abort_code = E_NOT_INITIALIZED)] -public entry fun account_can_not_update_task(admin: signer) acquires TodoList { - // creates an admin @todolist_addr account for test - account::create_account_for_test(signer::address_of(&admin)); - // account can not toggle task as no list was created - complete_task(&admin, 2); -} -``` - -This test confirms that an account can’t use that function if they haven’t created a list before. - -The test also uses a special annotation `#[expected_failure]` that, as the name suggests, expects to fail with an `E_NOT_INITIALIZED` error code. - -6. Run the `aptos move test` command. 
If all goes right, we should see a success message like: - -```rust -Running Move unit tests -[ PASS ] 0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018::todolist::account_can_not_update_task -[ PASS ] 0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018::todolist::test_flow -Test result: OK. Total tests: 2; passed: 2; failed: 0 -{ - "Result": "Success" -} -``` - -Now that everything works, we can compile the Move modules and publish the Move package to chain so our React app (and everyone else) can interact with our smart contract! - -### Publish todolist module to chain - -For now, the easiest way to publish a Move package to chain is using the CLI: - -1. `cd` into our `move` directory, and run: `aptos move compile` - -We are getting some _Unused alias_ errors. This is because we added the `string` alias before since we use it in our tests. But we don't use this alias in our smart contract code. - -This is why we are getting this error when we want to compile the module but are not getting it when we only run tests. - -To fix it, we can add a `use` statement that would be used only in tests. - -Add the following `use` statement where we have all of our import statements. - -```rust -use std::string::String; // change to this -... -#[test_only] -use std::string; // add this -``` - -2. Run: `aptos move test` and `aptos move compile` - all should work without errors. -3. Run: `aptos move publish` -4. Enter `yes` in the prompt. -5. That will compile, simulate and finally publish your module into devnet. You should see a success message: - -```rust -{ - "Result": { - "transaction_hash": "0x96b84689a53a28db7be6346627a99967f719946bc22766811a674e69da7783fa", - "gas_used": 7368, - "gas_unit_price": 100, - "sender": "cbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018", - "sequence_number": 2, - "success": true, - "timestamp_us": 1674246585276143, - "version": 651327, - "vm_status": "Executed successfully" - } -} -``` - -6. 
You can now head to the [Aptos Explorer](https://explorer.aptoslabs.com/), change the dropdown on the top right to the _Devnet_ network and look for that `transaction_hash` value - this will show you the transaction details. - -Now let's [set up a React app](./2-set-up-react-app.md) in chapter 2. diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/2-set-up-react-app.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/2-set-up-react-app.md deleted file mode 100644 index 513892aa9500a..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/2-set-up-react-app.md +++ /dev/null @@ -1,89 +0,0 @@ ---- -title: "2. Set up React App" -id: "set-up-react-app" ---- - -# 2. Set up React App - -This is the second chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md) where you have already [created a smart contract](./1-create-smart-contract.md) and are now setting up a React app. - -## Set up the app - -We will use the `react` library to build the client side with [Create React App](https://create-react-app.dev/docs/getting-started#creating-an-app). - -For the UI, we will use [Ant Design](https://ant.design/). This is just a personal decision; you are welcome to use any different UI library/framework you wish. - -1. In the root folder of the `my-first-dapp` project, run: - -```js -npx create-react-app client --template typescript -``` - -That will create a new `client` folder in the current path: - -2. Your file structure should look something like: - - ![client-folder](../../../static/img/docs/build-e2e-dapp-img-2.png) - -3. Run: `cd client` -4. Run: `npm start` - - At this point you should have your app running on [http://localhost:3000](http://localhost:3000), which displays the default React layout. - -5. In the `client/src` directory, find all of the React app files. Let’s clean it up a bit. -6. Open the `App.tsx` file and update its content to be: - -```js -function App() { - return
My app goes here
; -} - -export default App; -``` - -Once you save the changes, you should see that the app content has changed in the browser and displays `My app goes here`. - -7. Open the `App.tsx` file and remove the `import './App.css';` and `import logo from './logo.svg';` lines. Since we remove the default imports on this file, we can remove some files in our project. Delete the files `App.css` and `logo.svg`. -8. Open the `index.tsx` file and remove the `import './index.css';` line at the top of the file. - Now you can also delete the `src/index.css` file. - -## Our dapp UI - -First we will build the dApp UI layout. We have two UI states for the app: - -- When an account hasn’t created a list yet (on the left). -- When an account has created a list and can now add tasks to it (on the right). - ![dapp-ui](../../../static/img/docs/build-e2e-dapp-img-3.png) - -We will use the [Ant Design](https://ant.design/) library for our UI: - -1. Stop the local server if running. -2. On to the `client` directory and install our UI library package: `npm i antd@5.1.4` -3. Update `App.tsx` with the initial state UI: - -```jsx -return ( - <> - - - -

Our todolist

- - -

Connect Wallet

- -
-
- -); -``` - -4. Dont forget to import the _components_ we just added: - -```js -import { Layout, Row, Col } from "antd"; -``` - -5. Run the local server with `npm start`, you should see the _header_ that matches our UI mockup. - -It is now time to [add wallet support](./3-add-wallet-support.md) in chapter 3. diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/3-add-wallet-support.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/3-add-wallet-support.md deleted file mode 100644 index e91580c6371f2..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/3-add-wallet-support.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -title: "3. Add Wallet Support" -id: "add-wallet-support" ---- - -# 3. Add Wallet Support - -In the third chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md), you will be adding _wallet_ support to your [React app](./2-set-up-react-app.md). You now need a wallet to submit a transaction to the blockchain. - -Aptos provides a [wallet adapter](../../integration/wallet-adapter-concept.md) that supports many ecosystem wallets to offering a common interface and UI package that can be used to add a wallet connect button and a wallet selector modal. - -1. Stop the local server if running. -2. In the `client` folder, run: - -```cmd -npm i @aptos-labs/wallet-adapter-react@1.0.2 -``` - -```cmd -npm i @aptos-labs/wallet-adapter-ant-design@1.0.0 -``` - -This installs two packages: - -- the adapter React provider that holds the logic. -- a wallet connect UI package. - -3. We now need to add wallets to our app. There is a list of [wallets the adapter supports](https://github.com/aptos-labs/aptos-wallet-adapter#supported-wallet-packages); but to keep this tutorial simple, we will use only one wallet. - Still in the `client` folder, run - -```cmd -npm i petra-plugin-wallet-adapter -``` - -:::tip -If you haven't installed the Petra wallet extension yet: - -1. 
[Install Petra Wallet](https://petra.app) and open the Chrome extension. -2. Follow the [user instructions](https://petra.app/docs/use) on petra.app for help. -3. Switch to the Devnet network by clicking **Settings** > **Network** and selecting **devnet**. -4. Click the **Faucet** button to ensure you can receive test tokens. - -::: - -4. Open `Index.tsx` file. At the top of the file, add the following: - -```js -import { PetraWallet } from "petra-plugin-wallet-adapter"; -import { AptosWalletAdapterProvider } from "@aptos-labs/wallet-adapter-react"; -``` - -5. Still in `Index.tsx`, add a constant that holds an array of wallets: - -```js -... -const wallets = [new PetraWallet()]; -... -``` - -6. Inside the `render` method, update the code with the following: - -```jsx -... - - - -... -``` - -That wraps our app with the adapter provider and initializes it with our wallets. It also sets the provider to autoConnect a wallet. - -7. Open the `App.tsx` file and import the wallet connect UI package we installed in the previous step. At the top of the file add the following: - -```js -import { WalletSelector } from "@aptos-labs/wallet-adapter-ant-design"; -``` - -8. The UI package uses a style `.css` file; let's import that one also at the bottom of the import statements. - -```js -... -import "@aptos-labs/wallet-adapter-ant-design/dist/index.css"; -``` - -9. In the `return` statement, remove the `

Connect Wallet

` text and add the `WalletSelector` component: - -```jsx -... - - - -... -``` - -10. Start the local server with `npm start` and open the app in the browser. - -We now have a working Wallet connect button and a wallet selector modal. Feel free to play with it and connect a wallet with it. - -Then learn how to [fetch data from chain](./4-fetch-data-from-chain.md) in chapter 4. diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/4-fetch-data-from-chain.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/4-fetch-data-from-chain.md deleted file mode 100644 index 7feb0c96d1704..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/4-fetch-data-from-chain.md +++ /dev/null @@ -1,151 +0,0 @@ ---- -title: "4. Fetch Data from Chain" -id: "fetch-data-from-chain" ---- - -# 4. Fetch Data from Chain - -In the fourth chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md), you will be learning to fetch data from chain. - -Our UI logic relies on whether the connected account has created a todo list. If the account has created a todo list, our app should display that list; if not, the app should display a button offering the option to create a new list. - -For that, we first need to check if the connected account has a `TodoList` resource. In our smart contract, whenever someone creates a todo list we create and assign a `TodoList` resource to their account. - -To fetch data from chain, we can use the [Aptos TypeScript SDK](../../sdks/ts-sdk/index.md). The SDK provides classes and functions for us to easily interact and query the Aptos chain. - -To get started: - -1. Stop the local server if running. -2. In the `client` directory, run: `npm i aptos` -3. In the `App.tsx` file, import the `Provider` class and the `Network` type like so: - -```js -import { Provider, Network } from "aptos"; -``` - -The TypeScript SDK provides us with a `Provider` class where we can initialize and query the Aptos chain and Indexer. 
`Provider` expects `Network` type as an argument, which is the [network name](../../guides/system-integrators-guide.md#choose-a-network) we want to interact with. - -:::tip -Read more about the [`Provider`](../../sdks/ts-sdk/typescript-sdk-overview.md#provider-class) class in the Aptos TypeScript SDK overview. -::: - -1. In the `App.tsx` file, add: - -```js -const provider = new Provider(Network.DEVNET); -``` - -This will initialize a `Provider` instance for us with the devnet network. - -Our app displays different UIs based on a user resource (i.e if a user has a list ⇒ if a user has a `TodoList` resource). For that, we need to know the current account connected to our app. - -1. Import wallet from the wallet adapter React provider: - -```js -import { useWallet } from "@aptos-labs/wallet-adapter-react"; -``` - -2. Extract the account object from the wallet adapter: - -```js -function App ( - const { account } = useWallet(); - ... -) -``` - -The `account` object is `null` if there is no account connected; when an account is connected, the `account` object holds the account information, including the account address. - -3. Next, we want to fetch the account’s TodoList resource. - Begin by importing `useEffect` by using `jsx import useEffect from "react"; ` - Let’s add a `useEffect` hook to our file that would call a function to fetch the resource whenever our account address changes: - -```jsx -function App() { - ... - useEffect(() => { - fetchList(); - }, [account?.address]); - ... -} -``` - -4. Before creating our `fetchList` function, let’s also create a local state to store whether the account has a list: - -```js -function App ( - ... - const [accountHasList, setAccountHasList] = useState(false); - ... -) -``` -also import `useEffect` using -```import { useState, useEffect } from "react"; ``` - -5. 
Our `useEffect` hook is calling a `fetchList` function; let’s create it: - -```jsx -const fetchList = async () => { - if (!account) return []; - // change this to be your module account address - const moduleAddress = "0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018"; - try { - const TodoListResource = await provider.getAccountResource( - account.address, - `${moduleAddress}::todolist::TodoList` - ); - setAccountHasList(true); - } catch (e: any) { - setAccountHasList(false); - } -}; -``` - -The `moduleAddress` is the address we publish the module under, i.e the account address you have in your `Move.toml` file (`myaddr`). - -The `provider.getAccountResource()`expects an _account address_ that holds the resource we are looking for and a string representation of an on-chain _Move struct type_. - -- account address - is the current connected account (we are getting it from the wallet account object) -- Move struct type string syntax: - - The account address who holds the move module = our profile account address (You might want to change the `moduleAddress` const to be your own account address) - - The module name the resource lives in = `todolist` - - The resource name = `TodoList` - -If the request succeeds and there is a resource for that account, we want to set our local state to `true`; otherwise, we would set it to `false`. - -6. Let’s update ```import { Layout, Row, Col } from "antd"; ``` to import Button: - ```import { Layout, Row, Col, Button } from "antd"; ``` - -7. Let’s update our UI based on the `accountHasList` state: - -```jsx -return ( - <> - - - -

Our todolist

- - - - -
-
- {!accountHasList && ( - - - - - - )} - -); -``` - -We now have an **Add new list** button that appears only if the account doesn’t have a list. - -Start the local server with `npm start`. You should see the **Add new list** button. - -Next, let’s understand how to create a new list by [submitting data to chain](./5-submit-data-to-chain.md) in chapter 5. diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/5-submit-data-to-chain.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/5-submit-data-to-chain.md deleted file mode 100644 index 6e51334c7a646..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/5-submit-data-to-chain.md +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: "5. Submit Data to Chain" -id: "submit-data-to-chain" ---- - -# 5. Submit Data to Chain - -In the fifth chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md), you will be submitting data to the chain. - -So now we have an **Add new list** button that appears if the connected account hasn’t created a list yet. We still don't have a way for an account to create a list, so let’s add that functionality. - -1. First, our wallet adapter provider has a `signAndSubmitTransaction` function; let’s extract it by updating the following: - -```js -const { account, signAndSubmitTransaction } = useWallet(); -``` - -2. Add an `onClick` event to the new list button: - -```js - -``` - -3. 
Add the `addNewList` function: - -```js -const addNewList = async () => { - if (!account) return []; - // build a transaction payload to be submitted - const payload = { - type: "entry_function_payload", - function: `${moduleAddress}::todolist::create_list`, - type_arguments: [], - arguments: [], - }; - try { - // sign and submit transaction to chain - const response = await signAndSubmitTransaction(payload); - // wait for transaction - await provider.waitForTransaction(response.hash); - setAccountHasList(true); - } catch (error: any) { - setAccountHasList(false); - } -}; -``` - -4. Since our new function also uses `moduleAddress`, let’s get it out of the `fetchList` function scope to the global scope so it can be used globally. - -In our `fetchList` function, find the line: - -```js -// replace with your own address -const moduleAddress = "0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018"; -``` - -And move it to outside of the main `App` function, right beneath our const `provider` declarations. - -```js -export const provider = new Provider(Network.DEVNET); -// change this to be your module account address -export const moduleAddress = "0xcbddf398841353776903dbab2fdaefc54f181d07e114ae818b1a67af28d1b018"; -``` - -**Let’s go over the `addNewList` function code.** - -First, we use the `account` property from our wallet provider to make sure there is an account connected to our app. - -Then we build our transaction payload to be submitted to chain: - -```js -const payload = { - type: "entry_function_payload", - function: `${moduleAddress}::todolist::create_list`, - type_arguments: [], - arguments: [], -}; -``` - -- `type` is the function type we want to hit - our `create_list` function is an `entry` type function. -- `function`- is built from the module address, module name and the function name. -- `type_arguments`- this is for the case a Move function expects a generic type argument. 
-- `arguments` - the arguments the function expects, in our case it doesn’t expect any arguments. - -Next, we submit the transaction payload and wait for its response. The response returned from the `signAndSubmitTransaction` function holds the transaction hash. Since it can take a bit for the transaction to be fully submitted to chain and we also want to make sure it is submitted successfully, we `waitForTransaction`. And only then we can set our local `accountHasList` state to `true`. - -5. Before testing our app, let’s tweak our UI a bit and add a Spinner component to show up while we are waiting for the transaction. - Add a local state to keep track whether a transaction is in progress: - -```ts -const [transactionInProgress, setTransactionInProgress] = useState(false); -``` - -6. Update our `addNewList` function to update the local state: - -```js -const addNewList = async () => { - if (!account) return []; - setTransactionInProgress(true); - // build a transaction payload to be submited - const payload = { - type: "entry_function_payload", - function: `${moduleAddress}::todolist::create_list`, - type_arguments: [], - arguments: [], - }; - try { - // sign and submit transaction to chain - const response = await signAndSubmitTransaction(payload); - // wait for transaction - await provider.waitForTransaction(response.hash); - setAccountHasList(true); - } catch (error: any) { - setAccountHasList(false); - } finally { - setTransactionInProgress(false); - } -}; -``` - -7. Update our UI with the following: - -```jsx -return ( - <> - ... - - {!accountHasList && ( - - - - - - )} - - -); -``` - -Now you can head over to our app, and add a new list! - -Since you haven’t made the user interface able to handle cases where an account has created a list, you will do so next [handling tasks](./6-handle-tasks.md) in chapter 6. 
diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/6-handle-tasks.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/6-handle-tasks.md deleted file mode 100644 index 8ec616efd1283..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/6-handle-tasks.md +++ /dev/null @@ -1,434 +0,0 @@ ---- -title: "6. Handle Tasks" -id: "handle-tasks" ---- - -# 6. Handle Tasks - -In the sixth and final chapter of the tutorial on [building an end-to-end dapp on Aptos](./index.md), you will add functionality to the app so the user interface is able to handle cases where an account has created a list. - -We have covered how to [fetch data](./4-fetch-data-from-chain.md) (an account’s todo list) from chain and how to [submit a transaction](./5-submit-data-to-chain.md) (new todo list) to chain using Wallet. - -Let’s finish building our app by implementing fetch tasks and adding a task function. - -## Fetch tasks - -1. Create a local state `tasks` that will hold our tasks. It will be a state of a Task type (that has the same properties we set on our smart contract): - -```ts -type Task = { - address: string; - completed: boolean; - content: string; - task_id: string; -}; - -function App() { - const [tasks, setTasks] = useState([]); - ... -} -``` - -2. 
Update our `fetchList` function to fetch the tasks in the account’s `TodoList` resource: - -```js -const fetchList = async () => { - if (!account) return []; - try { - const TodoListResource = await provider.getAccountResource( - account?.address, - `${moduleAddress}::todolist::TodoList` - ); - setAccountHasList(true); - // tasks table handle - const tableHandle = (TodoListResource as any).data.tasks.handle; - // tasks table counter - const taskCounter = (TodoListResource as any).data.task_counter; - - let tasks = []; - let counter = 1; - while (counter <= taskCounter) { - const tableItem = { - key_type: "u64", - value_type: `${moduleAddress}::todolist::Task`, - key: `${counter}`, - }; - const task = await provider.getTableItem(tableHandle, tableItem); - tasks.push(task); - counter++; - } - // set tasks in local state - setTasks(tasks); - } catch (e: any) { - setAccountHasList(false); - } -}; -``` - -**This part is a bit confusing, so stick with us!** - -Tasks are stored in a table (this is how we built our contract). To fetch a table item (i.e a task), we need that task's table handle. We also need the `task_counter` in that resource so we can loop over and fetch the task with the `task_id` that matches the `task_counter`. - -```js -const tableHandle = (TodoListResource as any).data.tasks.handle; -const taskCounter = (TodoListResource as any).data.task_counter; -``` - -Now that we have our tasks table handle and our `task_counter` variable, let's loop over the `taskCounter`. We define a `counter` and set it to 1 as the task_counter / task_id is never less than 1. 
- -We loop while the `counter` is less then the `taskCounter` and fetch the table item and push it to the tasks array: - -```js -let tasks = []; -let counter = 1; -while (counter <= taskCounter) { - const tableItem = { - key_type: "u64", - value_type: `${moduleAddress}::todolist::Task`, - key: `${counter}`, - }; - const task = await provider.getTableItem(tableHandle, tableItem); - tasks.push(task); - counter++; -} -``` - -We build a `tableItem` object to fetch. If we take a look at our table structure from the contract: - -```rust -tasks: Table, -``` - -We see that it has a `key` type `u64` and a `value` of type `Task`. And whenever we create a new task, we assign the `key` to be the incremented task counter. - -```rust -// adds the new task into the tasks table -table::upsert(&mut todo_list.tasks, counter, new_task); -``` - -So the object we built is: - -```js -{ - key_type: "u64", - value_type:`${moduleAddress}::todolist::Task`, - key: `${taskCounter}`, -} -``` - -Where `key_type` is the table `key` type, `key` is the key value we are looking for, and the `value_type` is the table `value` which is a `Task` struct. The Task struct uses the same format from our previous resource query: - -- The account address who holds that module = our profile account address -- The module name the resource lives in = `todolist` -- The struct name = `Task` - -The last thing we want to do is display the tasks we just fetched. - -6. In our `App.tsx` file, update our UI with the following code: - -```jsx -{ - !accountHasList ? ( - - - - - - ) : ( - - - {tasks && ( - ( - ]}> - {`${task.address.slice(0, 6)}...${task.address.slice(-5)}`}
- } - /> - - )} - /> - )} - - - ); -} -``` - -That will display the **Add new list** button if account doesn’t have a list or instead the tasks if the account has a list. - -Go ahead and refresh your browser - see the magic! - -We haven’t added any tasks yet, so we simply see a box of empty data. Let’s add some tasks! - -## Add task - -1. Update our UI with an _add task_ input: - -```jsx -{!accountHasList ? ( - ... -) : ( - - // Add this! - - - - - - - ... - -)} -``` - -We have added a text input to write the task and a button to add the task. - -2. Create a new local state that holds the task content: - -```jsx -function App() { - ... - const [newTask, setNewTask] = useState(""); - ... -} -``` - -3. Add an `onWriteTask` function that will get called whenever a user types something in the input text: - -```jsx -function App() { - ... - const [newTask, setNewTask] = useState(""); - - const onWriteTask = (event: React.ChangeEvent) => { - const value = event.target.value; - setNewTask(value); - }; - ... -} -``` - -4. Find our `` component, add the `onChange` event to it, pass it our `onWriteTask` function and set the input value to be the `newTask` local state: - -```jsx - onWriteTask(event)} // add this - style={{ width: "calc(100% - 60px)" }} - placeholder="Add a Task" - size="large" - value={newTask} // add this -/> -``` - -Cool! Now we have a working flow that when the user types something on the Input component, a function will get fired and set our local state with that content. - -5. Let’s also add a function that submits the typed task to chain! Find our Add ` -``` - -That adds an `onClickevent` that triggers an `onTaskAdded` function. - -When someones adds a new task we: - -- want to verify they are connected with a wallet. -- build a transaction payload that would be submitted to chain. -- submit it to chain using our wallet. -- wait for the transaction. -- update our UI with that new task (without the need to refresh the page). - -6. 
Add an `onTaskAdded` function with: - -```jsx - const onTaskAdded = async () => { - // check for connected account - if (!account) return; - setTransactionInProgress(true); - // build a transaction payload to be submited - const payload = { - type: "entry_function_payload", - function: `${moduleAddress}::todolist::create_task`, - type_arguments: [], - arguments: [newTask], - }; - - // hold the latest task.task_id from our local state - const latestId = tasks.length > 0 ? parseInt(tasks[tasks.length - 1].task_id) + 1 : 1; - - // build a newTaskToPush object into our local state - const newTaskToPush = { - address: account.address, - completed: false, - content: newTask, - task_id: latestId + "", - }; - - try { - // sign and submit transaction to chain - const response = await signAndSubmitTransaction(payload); - // wait for transaction - await provider.waitForTransaction(response.hash); - - // Create a new array based on current state: - let newTasks = [...tasks]; - - // Add item to the tasks array - newTasks.push(newTaskToPush); - // Set state - setTasks(newTasks); - // clear input text - setNewTask(""); - } catch (error: any) { - console.log("error", error); - } finally { - setTransactionInProgress(false); - } - }; -``` - -**Let’s go over on what is happening.** - -First, note we use the `account` property from our wallet provider to make sure there is an account connected to our app. - -Then we build our transaction payload to be submitted to chain: - -```js -const payload = { - type: "entry_function_payload", - function: `${moduleAddress}::todolist::create_task`, - type_arguments: [], - arguments: [newTask], -}; -``` - -- `type` is the function type we want to hit - our `create_task` function is an `entry` type function. -- `function`- is built from the module address, module name and the function name. -- `type_arguments`- this is for the case a Move function expects a generic type argument. 
-- `arguments` - the arguments the function expects, in our case the task content. - -Then, within our try/catch block, we use a wallet provider function to submit the transaction to chain and an SDK function to wait for that transaction. -If all goes well, we want to find the current latest task ID so we can add it to our current tasks state array. We will also create a new task to push to the current tasks state array (so we can display the new task in our tasks list on the UI without the need to refresh the page). - -TRY IT! - -Type a new task in the text input, click **Add**, approve the transaction and see it being added to the tasks list. - -## Mark task as completed - -Next, we can implement the `complete_task` function. We have the checkbox in our UI so users can mark a task as completed. - -1. Update the `` component with an `onCheck` property that would call an `onCheckboxChange` function once it is checked: - -```jsx - onCheckboxChange(event, task.task_id)}/> -]}> -``` - -2. Create the `onCheckboxChange` function (make sure to import `CheckboxChangeEvent` from `antd` - `import { CheckboxChangeEvent } from "antd/es/checkbox";`): - -```js -const onCheckboxChange = async ( - event: CheckboxChangeEvent, - taskId: string - ) => { - if (!account) return; - if (!event.target.checked) return; - setTransactionInProgress(true); - const payload = { - type: "entry_function_payload", - function: - `${moduleAddress}::todolist::complete_task`, - type_arguments: [], - arguments: [taskId], - }; - - try { - // sign and submit transaction to chain - const response = await signAndSubmitTransaction(payload); - // wait for transaction - await provider.waitForTransaction(response.hash); - - setTasks((prevState) => { - const newState = prevState.map((obj) => { - // if task_id equals the checked taskId, update completed property - if (obj.task_id === taskId) { - return { ...obj, completed: true }; - } - - // otherwise return object as is - return obj; - }); - - return newState; 
- }); - } catch (error: any) { - console.log("error", error); - } finally { - setTransactionInProgress(false); - } - }; -``` - -Here we basically do the same thing we did when we created a new list or a new task. - -We make sure there is an account connected, set the transaction in progress state, build the transaction payload, submit the transaction, wait for it and update the task on the UI as completed. - -3. Update the `Checkbox` component to be checked by default if a task has already marked as completed: - -```jsx -... - - {task.completed ? ( - - ) : ( - - onCheckboxChange(event, task.task_id) - } - /> - )} - , - ]} -> -... -``` - -Try it! Check a task’s checkbox, approve the transaction and see the task marked as completed. - -You have now learned how to build a dapp on Aptos from end to end. Congratulations! Tell your friends. :-) diff --git a/developer-docs-site/docs/tutorials/build-e2e-dapp/index.md b/developer-docs-site/docs/tutorials/build-e2e-dapp/index.md deleted file mode 100644 index d52f4c4c82239..0000000000000 --- a/developer-docs-site/docs/tutorials/build-e2e-dapp/index.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: "Build E2E Dapp on Aptos" -slug: "e2e-dapp-index" ---- - -# Build an End-to-End Dapp on Aptos - -A common way to learn a new framework or programming language is to build a simple todo list. In this tutorial, we will learn how to build an end-to-end todo list dapp, starting from the smart contract side through the front-end side and finally use of a wallet to interact with the two. - -See the completed code in the [my_first_dapp](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/my_first_dapp). - -## Chapters - -After meeting the [prerequisites](#prerequisites) and [getting set up](#setup) as described below, you will follow this tutorial in this order: - -1. [Create a smart contract](./1-create-smart-contract.md) -2. [Set up React app](./2-set-up-react-app.md) -3. 
[Add Wallet support](3-add-wallet-support.md) -4. [Fetch Data from Chain](4-fetch-data-from-chain.md) -5. [Submit data to chain](./5-submit-data-to-chain.md) -6. [Handle Tasks](./6-handle-tasks.md) - -## Prerequisites - -You must have: - -* [Aptos CLI](../../tools/aptos-cli/install-cli/index.md) `@1.0.4` or later -* [Aptos TypeScript SDK](../../sdks/ts-sdk/index.md) `@1.7.1` or later -* [Aptos Wallet Adapter](../../integration/wallet-adapter-concept.md) `@1.0.2` or later -* [Create React App](https://create-react-app.dev/) -* [node and npm](https://nodejs.org/en/) - -Although we will explain some React decisions, we are not going to deep dive into how React works; so we assume you have some previous experience with React. - -## Setup - -In this section, we will create a `my-first-dapp` directory to hold our project files, both client-side code (React based)and the Move code (our smart contract). - -1. Open a terminal and navigate to the desired directory for the project (for example, the `Desktop` directory). -2. Create a new directory called `my-first-dapp`, for example: - ```shell - mkdir my-first-dapp - ``` -3. Navigate into that directory: - ```shell - cd my-first-dapp - ``` - - Now let's [create a smart contract](./1-create-smart-contract.md). diff --git a/developer-docs-site/docs/tutorials/first-coin.md b/developer-docs-site/docs/tutorials/first-coin.md deleted file mode 100644 index 92bbb72be7509..0000000000000 --- a/developer-docs-site/docs/tutorials/first-coin.md +++ /dev/null @@ -1,365 +0,0 @@ ---- -title: "Your First Coin" -slug: "your-first-coin" ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Your First Coin - -This tutorial introduces how you can compile, deploy, and mint your own coin, named [MoonCoin](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/moon_coin). 
- -## Step 1: Pick an SDK - -Install your preferred SDK from the below list: - -* [TypeScript SDK](../sdks/new-ts-sdk/index.md) -* [Python SDK](../sdks/python-sdk.md) -* [Rust SDK](../sdks/rust-sdk.md) - ---- - -## Step 2: Install the CLI - -[Install the precompiled binary for the Aptos CLI](../tools/aptos-cli/install-cli/index.md). - ---- - -## Step 3: Run the example - - - - -Clone the `aptos-ts-sdk` repo: - - -```bash -git clone https://github.com/aptos-labs/aptos-ts-sdk.git -``` - -Navigate to the TypeScript SDK directory: - -```bash -cd ~/examples/typescript/ -``` - -Install the necessary dependencies: - -```bash -pnpm install -``` - -Run the TypeScript [`your_coin`](https://github.com/aptos-labs/aptos-ts-sdk/blob/main/examples/typescript/your_coin.ts) example: - -```bash -pnpm run your_coin -``` - -The application will complete, printing: - -```ts -Bob's initial MoonCoin balance: 0. -Alice mints herself 100 MoonCoin. -Alice transfers 100 MoonCoin to Bob. -Bob's updated MoonCoin balance: 100. -``` - - - - -Clone the `aptos-core` repo: - - -```bash -git clone https://github.com/aptos-labs/aptos-core -``` -Navigate to the Python SDK directory: - -```bash -cd ~/aptos-core/ecosystem/python/sdk -``` - -Install the necessary dependencies: - -```bash -curl -sSL https://install.python-poetry.org | python3 -poetry install -``` - -Run the Python [`your_coin`](https://github.com/aptos-labs/aptos-core/blob/main/ecosystem/python/sdk/examples/your_coin.py) example: - -```bash -poetry run python -m examples.your_coin ~/aptos-core/aptos-move/move-examples/moon_coin -``` - -### Step 3.1: Build the package - -The example run will pause with the following output: - -```bash -=== Addresses === -Alice: 0x5e603a89cf690d7134cf2f24fdb16ba90c4f5686333721c12e835fb6c76bc7ba -Bob: 0xc8421fa4a99153f955e50f1de2a6acff2f3fd0bb33aa17ba1f5b32b699f6c825 - -Update the package with Alice's address, compile, and press enter. 
-``` - -At this point, open another terminal and change directories to the MoonCoin package's directory: - -```bash -cd ~/aptos-core/aptos-move/move-examples/moon_coin -``` - -Next, build the package using the CLI: - -```bash -aptos move compile --named-addresses MoonCoin=0x5e603a89cf690d7134cf2f24fdb16ba90c4f5686333721c12e835fb6c76bc7ba --save-metadata -``` - -The `--named-addresses` is a list of address mappings that must be translated in order for the package to be compiled to be stored in Alice's account. Notice how `MoonCoin` is set to Alice's address printed above. Also `--save-metadata` is required to publish the package. - ---- - -### Step 3.2: Completing the example - -Returning to the previous prompt, press ENTER as the package is now ready to be published. - -The application will complete, printing: - -```bash - -Publishing MoonCoin package. - -Bob registers the newly created coin so he can receive it from Alice. -Bob's initial MoonCoin balance: 0. -Alice mints Bob some of the new coin. -Bob's updated MoonCoin balance: 100. -``` - - - - ---- - - - -## Step 4: MoonCoin in depth - -### Step 4.1: Building and publishing the MoonCoin package - -Move contracts are effectively a set of Move modules known as a package. When deploying or upgrading a new package, the compiler must be invoked with `--save-metadata` to publish the package. In the case of MoonCoin, the following output files are critical: - -- `build/Examples/package-metadata.bcs`: Contains the metadata associated with the package. -- `build/Examples/bytecode_modules/moon_coin.mv`: Contains the bytecode for the `moon_coin.move` module. - -These are read by the example and published to the Aptos blockchain: - - - - -In the TypeScript example, we use `aptos move build-publish-payload` command to compile and build the module. -That command builds the `build` folder that contains the `package-metadata.bcs` and the bytecode for the `moon_coin.mv` module. 
The command also builds a publication transaction payload and stores it in a JSON output file that we can later read from to get the `metadataBytes` and `byteCode` to publish the contract to chain with. - -Compile the package: -```ts -export function compilePackage( - packageDir: string, - outputFile: string, - namedAddresses: Array<{ name: string; address: AccountAddress }>, -) { - const addressArg = namedAddresses.map(({ name, address }) => `${name}=${address}`).join(" "); - // Assume-yes automatically overwrites the previous compiled version, only do this if you are sure you want to overwrite the previous version. - const compileCommand = `aptos move build-publish-payload --json-output-file ${outputFile} --package-dir ${packageDir} --named-addresses ${addressArg} --assume-yes`; - execSync(compileCommand); -} - -compilePackage("move/moonCoin", "move/moonCoin/moonCoin.json", [{ name: "MoonCoin", address: alice.accountAddress }]); -``` - -Publish the package to chain: -```ts -export function getPackageBytesToPublish(filePath: string) { - // current working directory - the root folder of this repo - const cwd = process.cwd(); - // target directory - current working directory + filePath (filePath json file is generated with the prevoius, compilePackage, cli command) - const modulePath = path.join(cwd, filePath); - - const jsonData = JSON.parse(fs.readFileSync(modulePath, "utf8")); - - const metadataBytes = jsonData.args[0].value; - const byteCode = jsonData.args[1].value; - - return { metadataBytes, byteCode }; -} - -const { metadataBytes, byteCode } = getPackageBytesToPublish("move/moonCoin/moonCoin.json"); - -// Publish MoonCoin package to chain -const transaction = await aptos.publishPackageTransaction({ - account: alice.accountAddress, - metadataBytes, - moduleBytecode: byteCode, -}); - -const pendingTransaction = await aptos.signAndSubmitTransaction({ - signer: alice, - transaction, -}); - -await aptos.waitForTransaction({ transactionHash: 
pendingTransaction.hash }); -``` - - - - -```python -:!: static/sdks/python/examples/your_coin.py publish -``` - - - - - Coming soon. - - - - ---- - -### Step 4.2: Understanding the MoonCoin module - -The MoonCoin module defines the `MoonCoin` struct, or the distinct type of coin type. In addition, it contains a function called `init_module`. The `init_module` function is called when the module is published. In this case, MoonCoin initializes the `MoonCoin` coin type as a `ManagedCoin`, which is maintained by the owner of the account. - -:::tip ManagedCoin framework -[`ManagedCoin`](https://github.com/aptos-labs/aptos-core/blob/f81ccb01f00227f9c0f36856fead4879f185a9f6/aptos-move/framework/aptos-framework/sources/managed_coin.move#L1) is a simple coin management framework for coins directly managed by users. It provides convenience wrappers around `mint` and `burn`. -::: - -```rust -:!: static/move-examples/moon_coin/sources/MoonCoin.move moon -``` - ---- - -### Step 4.3: Understanding coins - -Coins have several primitives: - -- **Minting**: Creating new coins. -- **Burning**: Deleting coins. -- **Freezing**: Preventing an account from storing coins in `CoinStore`. -- **Registering**: Creating a `CoinStore` resource on an account for storing coins. -- **Transferring**: Withdrawing and depositing coins into `CoinStore`. - -:::tip - -The entity that creates a new coin gains the capabilities for minting, burning, and freezing. 
-::: - ---- - -#### Step 4.3.1: Initializing a coin - -Once a coin type has been published to the Aptos blockchain, the entity that published that coin type can initialize it: - -```rust showLineNumbers -public fun initialize( - account: &signer, - name: string::String, - symbol: string::String, - decimals: u8, - monitor_supply: bool, -): (BurnCapability, FreezeCapability, MintCapability) { - let account_addr = signer::address_of(account); - - assert!( - coin_address() == account_addr, - error::invalid_argument(ECOIN_INFO_ADDRESS_MISMATCH), - ); - - assert!( - !exists>(account_addr), - error::already_exists(ECOIN_INFO_ALREADY_PUBLISHED), - ); - - let coin_info = CoinInfo { - name, - symbol, - decimals, - supply: if (monitor_supply) { option::some(optional_aggregator::new(MAX_U128, false)) } else { option::none() }, - }; - move_to(account, coin_info); - - (BurnCapability{ }, FreezeCapability{ }, MintCapability{ }) -} -``` - -This ensures that this coin type has never been initialized before. Notice the check on lines 10 and 15 to ensure that the caller to `initialize` is the same one that actually published this module, and that there is no `CoinInfo` stored on their account. If both those conditions check, then a `CoinInfo` is stored and the caller obtains capabilities for burning, freezing, and minting. - -:::tip -MoonCoin calls this `initialize` function automatically upon package publishing. -::: - ---- - -#### Step 4.3.2: Registering a coin - -To use a coin, an entity must register a `CoinStore` for it on their account: - -```rust -public entry fun registerCoinType(account: &signer) { -``` - -MoonCoin uses `ManagedCoin` that provides an entry function wrapper: `managed_coin::register`. Here is an example script for registration: - -```rust -:!: static/move-examples/moon_coin/scripts/register.move moon -``` - ---- - -#### Step 4.3.3: Minting a coin - -Minting coins requires the mint capability that was produced during initialization. 
the function `mint` (see below) takes in that capability and an amount, and returns back a `Coin` struct containing that amount of coins. If the coin tracks supply, it will be updated. - -```rust -public fun mint( - amount: u64, - _cap: &MintCapability, -): Coin acquires CoinInfo { - if (amount == 0) { - return zero() - }; - - let maybe_supply = &mut borrow_global_mut>(coin_address()).supply; - if (option::is_some(maybe_supply)) { - let supply = option::borrow_mut(maybe_supply); - optional_aggregator::add(supply, (amount as u128)); - }; - - Coin { value: amount } -} -``` - -`ManagedCoin` makes this easier by providing an entry function `managed_coin::mint`. - ---- - -#### Step 4.3.4: Transferring a coin - -Aptos provides several building blocks to support coin transfers: - -- `coin::deposit`: Allows any entity to deposit a coin into an account that has already called `coin::register`. -- `coin::withdraw`: Allows any entity to extract a coin amount from their account. -- `aptos_account::transfer_coins`: Transfer coins of specific CoinType to a receiver. - -:::tip important -There are two separate withdraw and deposit events instead of a single transfer event. -::: - -## Supporting documentation -* [Aptos CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md) -* [TypeScript SDK](../sdks/new-ts-sdk/index.md) -* [Python SDK](../sdks/python-sdk.md) -* [Rust SDK](../sdks/rust-sdk.md) -* [REST API specification](https://aptos.dev/nodes/aptos-api-spec#/) diff --git a/developer-docs-site/docs/tutorials/first-dapp.md b/developer-docs-site/docs/tutorials/first-dapp.md deleted file mode 100644 index 6e264fa172093..0000000000000 --- a/developer-docs-site/docs/tutorials/first-dapp.md +++ /dev/null @@ -1,845 +0,0 @@ ---- -title: "Your First Dapp" -slug: "your-first-dapp" ---- - -# Your First Dapp - -In this tutorial, you will learn how to build a [dapp](https://en.wikipedia.org/wiki/Decentralized_application) -on the Aptos blockchain. 
A dapp usually consists of a graphical user interface, which interacts with one or more Move -modules. This dapp will let users publish and share snippets of text on the Aptos blockchain. - -For this tutorial, we will use the Move module [`hello_blockchain`](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/hello_blockchain) -described in [Your First Move Module](first-move-module.md) and focus on building the user interface around the module. - -For a more comprehensive view of this process, see [Build an End-to-End Dapp on Aptos](build-e2e-dapp/index.md). - -We will use the: - -* [TypeScript SDK](../sdks/ts-sdk/index.md) -* [Petra Wallet](https://petra.app) -* [Aptos CLI](../tools/aptos-cli/use-cli/use-aptos-cli.md) - -:::tip Full source code - -We recommend becoming familiar with the newer full source code documented in the [Build an End-to-End Dapp on Aptos](build-e2e-dapp/index.md) tutorial. The full source code for this tutorial is still available in the [`dapp-example`](https://github.com/aptos-labs/aptos-core/tree/53e240003e95c9b865441ea792ab4e1e8134a267/developer-docs-site/static/examples/typescript/dapp-example) directory. -::: - -## Prerequisites - -### Aptos Wallet - -Before starting this tutorial, you'll need a chrome extension wallet to interact with the dapp, such as, the [Petra wallet extension](https://petra.app). - -If you haven't installed the Petra wallet extension before: -1. Open the Wallet and click **Create a new wallet**. Then click **Create account** to create an Aptos Account. -2. Copy the private key. You will need it to set up the Aptos CLI in the next section. -3. See the [user instructions](https://petra.app/docs/use) on petra.app for help. -4. Switch to the Devnet network by clicking, settings, network, and selecting **devnet**. -5. Click the faucet button to ensure you can receive test tokens. 
- -If you already have the Petra wallet installed, we suggest you create a new wallet for purposes of this tutorial. -1. In the extension, go to settings, switch account, add account, create new account to create a new account. -2. Switch to the Devnet network by clicking, settings, network, and selecting **devnet**. -3. Click the faucet button to ensure you can receive test tokens. - -:::tip -Ensure your account has sufficient funds to perform transactions by clicking the **Faucet** button. -::: - -### Aptos CLI - -We will also be installing the Aptos CLI so that we can publish - -1. Install the [Aptos CLI](../tools/aptos-cli/install-cli/index.md). - -2. Run `aptos init --profile my-first-nft`. - -3. Select the network `devnet` - -4. When prompted for your private key, paste the private key from the Petra Wallet and press **Return**. - 1. You can find the private key by going to settings, manage account, show the private key, and copy that field. - -You will see output resembling: - -```text -Account has been already found onchain - ---- -Aptos CLI is now set up for account as profile my-first-nft! Run `aptos --help` for more information about commands -{ - "Result": "Success" -} -``` -This initializes the Aptos CLI to use the same account as used by the Aptos Wallet. - -5. Run `aptos account list --profile my-first-nft` to verify that it is working. You should see your account address listed in the `addr` field for all events. - -## Step 1: Set up a single page app - -We will now set up the frontend user interface for our dapp. We will use [`create-react-app`](https://create-react-app.dev/) to set up the app in this tutorial, but neither React nor `create-react-app` are required. You can use your preferred JavaScript framework. - -First run: - -```bash -npx create-react-app first-dapp --template typescript -``` - -Accept installation of the `create-react-app` package if prompted. 
Then navigate to the newly created `first-dapp` directory: - -```bash -cd first-dapp -``` - -And start the app with: - -```bash -npm start -``` - -You will now have a basic React app running in your browser at: http://localhost:3000/ - -## Step 2: Integrate the Aptos Wallet Web3 API - -The Aptos Wallet provides a Web3 API for dapps at `window.aptos`. You can see how it works by opening up the browser console and running `await window.aptos.account()`. It will print out the address corresponding to the account you set up in the Aptos Wallet. - -Next we will update our app to use this API to display the Wallet account's address. - -### Wait until `window.aptos` is defined - -The first step when integrating with the `window.aptos` API is to delay rendering the application until the `window.onload` event has fired. - -Quit the app by hitting Ctrl-C in the terminal running the `npm start` process. - -Still in the `first-dapp` directory, open the `src/index.tsx` file and change the following code snippet: - -```typescript -root.render( - - - -); -``` - -to this: - -```typescript -window.addEventListener('load', () => { - root.render( - - - - ); -}); -``` - -This change will ensure that the `window.aptos` API has been initialized by the time we render the app. If we render too early, the Wallet extension may not have had a chance to initialize the API yet and thus `window.aptos` will be `undefined`. - -To see the change, once again run: `npm start` - -### (Optional) TypeScript setup for `window.aptos` - -If you are using TypeScript, you may also want to inform the compiler of the existence of the `window.aptos` API. Add the following to `src/index.tsx`: - -```typescript -declare global { - interface Window { aptos: any; } -} -``` - -This lets us use the `window.aptos` API without having to do `(window as any).aptos`. - -### Display `window.aptos.account()` in the app - -Our app is now ready to use the `window.aptos` API. 
We will change `src/App.tsx` to retrieve the value of `window.aptos.account()` (the wallet account) on initial render, store it in state, and then display it by replacing the contents in the file with: - -```typescript -import React from 'react'; -import './App.css'; - -function App() { - // Retrieve aptos.account on initial render and store it. - const [address, setAddress] = React.useState(null); - - /** - * init function - */ - const init = async() => { - // connect - const { address, publicKey } = await window.aptos.connect(); - setAddress(address); - } - - React.useEffect(() => { - init(); - }, []); - - return ( -
-

Account Address: { address }

-
- ); -} - -export default App; -``` - -Refresh the page and you will see your account address. - -### Add some CSS - -Next, replace the contents of `src/App.css`: - -```css -a, input, textarea { - display: block; -} - -textarea { - border: 0; - min-height: 50vh; - outline: 0; - padding: 0; - width: 100%; -} -``` - -## Step 3: Use the SDK to get data from the blockchain - -The Wallet is now integrated with our dapp. Next, we will integrate the Aptos SDK to get data from the blockchain. We will use the Aptos SDK to retrieve information about our account and display that information on the page. - -### Add the `aptos` dependency to `package.json` - -First, add the SDK to the project's dependencies: - -```bash -npm install --save aptos -``` - -You will now see `"aptos": "^1.3.15"` (or similar) in your `package.json`. - -### Create an `AptosClient` - -Now we can import the SDK and create an `AptosClient` to interact with the blockchain (technically it interacts with [the REST API](https://github.com/aptos-labs/aptos-core/tree/main/api), which interacts with the blockchain). - -As our wallet account is on devnet, we will set up the `AptosClient` to interact with devnet as well. Add the following to `src/App.tsx`: - -```typescript -import { Types, AptosClient } from 'aptos'; - -// Create an AptosClient to interact with devnet. -const client = new AptosClient('https://fullnode.devnet.aptoslabs.com/v1'); - -function App() { - // ... - - // Use the AptosClient to retrieve details about the account. - const [account, setAccount] = React.useState(null); - React.useEffect(() => { - if (!address) return; - client.getAccount(address).then(setAccount); - }, [address]); - - return ( -
-

Account Address: { address }

-

Sequence Number: { account?.sequence_number }

-
- ); -} -``` - -Now, in addition to displaying the account address, the app will also display the account's `sequence_number`. This `sequence_number` represents the next transaction sequence number to prevent replay attacks of transactions. You will see this number increasing as you make transactions with the account. - -:::tip -If the account you're using for this application doesn't exist on-chain, you will not see a sequence number. You'll need -to create the account first via a faucet. -::: - -## Step 4: Publish a Move module - -Our dapp is now set up to read from the blockchain. The next step is to write to the blockchain. To do so, we will publish a Move module to our account. - -The Move module provides a location for this data to be stored. Specifically, we will use the `hello_blockchain` module from [Your First Move Module](first-move-module.md), which provides a resource called `MessageHolder` that holds a string (called `message`). - -
-Publish the `hello_blockchain` module with the Aptos CLI -We will use the Aptos CLI to compile and publish the `hello_blockchain` module. - -1. Download [the `hello_blockchain` package](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/hello_blockchain). - -2. Use the `aptos move publish` command (replacing `/path/to/hello_blockchain/` and `
`): - -```bash -aptos move publish --profile my-first-nft --package-dir /path/to/hello_blockchain/ --named-addresses hello_blockchain=
-``` - -For example: - -```bash -aptos move publish --profile my-first-nft --package-dir ~/code/aptos-core/aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481 -``` - -The `--named-addresses` replaces the named address `hello_blockchain` in `hello_blockchain.move` with the specified address. For example, if we specify `--named-addresses hello_blockchain=0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481`, then the following: - -```rust -module hello_blockchain::message { -``` - -becomes: - -```rust -module 0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481::message { -``` - -This makes it possible to publish the module for the given account, in this case our wallet account: -`0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481` - -Assuming that your account has enough funds to execute the transaction, you can now publish the `hello_blockchain` module in your account. If you refresh the app, you will see that the account sequence number has increased from 0 to 1. - -You can also verify the module was published by going to the [Aptos Explorer](https://explorer.aptoslabs.com/) and looking up your account. 
If you scroll down to the *Account Modules* section, you should see something resembling: - -```json -{ - "address": "0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481", - "name": "message", - "friends": [], - "exposedFunctions": [ - { - "name": "get_message", - "visibility": "public", - "genericTypeParams": [], - "params": [ - "address" - ], - "_return": [ - "0x1::string::String" - ] - }, - { - "name": "set_message", - "visibility": "script", - "genericTypeParams": [], - "params": [ - "signer", - "vector" - ], - "_return": [] - } - ], - "structs": [ - { - "name": "MessageChangeEvent", - "isNative": false, - "abilities": [ - "drop", - "store" - ], - "genericTypeParams": [], - "fields": [ - { - "name": "from_message", - "type": "0x1::string::String" - }, - { - "name": "to_message", - "type": "0x1::string::String" - } - ] - }, - { - "name": "MessageHolder", - "isNative": false, - "abilities": [ - "key" - ], - "genericTypeParams": [], - "fields": [ - { - "name": "message", - "type": "0x1::string::String" - }, - { - "name": "message_change_events", - "type": "0x1::event::EventHandle<0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481::message::MessageChangeEvent>" - } - ] - } - ] -} -``` - -Make a note of `"name": "message"; we will use it in the next section. -
- -
-Publish the `hello_blockchain` module with the TS SDK -We will use the Aptos CLI to compile the `hello_blockchain` module and use the [TypeScript SDK](../sdks/ts-sdk/index.md) to publish the module. - -1. Download the [`hello_blockchain`](https://github.com/aptos-labs/aptos-core/tree/main/aptos-move/move-examples/hello_blockchain) package. - -2. Next, use the `aptos move compile --save-metadata` command (replacing `/path/to/hello_blockchain/` and `
`): - -```bash -aptos move compile --save-metadata --package-dir /path/to/hello_blockchain/ --named-addresses hello_blockchain=
-``` - -For example: - -```bash -aptos move compile --save-metadata --package-dir ~/code/aptos-core/aptos-move/move-examples/hello_blockchain/ --named-addresses hello_blockchain=0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481 -``` - -The `--named-addresses` replaces the named address `hello_blockchain` in `hello_blockchain.move` with the specified address. For example, if we specify `--named-addresses hello_blockchain=0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481`, then the following: - -```rust -module hello_blockchain::message { -``` - -becomes: - -```rust -module 0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481::message { -``` - -This makes it possible to publish the module for the given account, in this case our wallet account: `0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481` - -The `--save-metadata` argument, if set, generates and saves the package metadata in the package's `build` directory. This metadata can be used to construct a transaction to publish a package. - -At this point, we should have a `build` folder in the same directory of our `hello_blockchain` folder. The next step would be to publish the module to the chain. -The TypeScript SDK provides us a `publishPackage()` function where it expects to get both package metadata and the move module as `Uint8Array`. We can supply this by converting both the `package-metadata.bcs` file and the `bytecode_modules/message.mv` module into hex strings (using a command, below), and then to `Uint8Array` (using the SDK). - -Convert `package-metadata.bcs` file and the `bytecode_modules/message.mv` module into hex strings: - -Navigate to the `hello_blockchain/build/Example` directory: -```bash -cd hello_blockchain/build/Example -``` - -Convert `package-metadata.bcs` to a hex string. 
On macOS and Linux, we can use the command: -```bash -cat package-metadata.bcs | od -v -t x1 -A n | tr -d ' \n' -``` -That will output a hex string we can later use. - -Convert `message.mv` to a hex string. On Mac and Linux we can use the command: -```bash -cat bytecode_modules/message.mv | od -v -t x1 -A n | tr -d ' \n' -``` -That will also output a hex string we can later use. Keep both of the hex strings ready! - -Back to our react app, let's add a button to click on to publish the module, use the `publishPackage` function TypeScript SDK provides us and display a link to get the account's resources where we can see the published module. - -We would need our account's private key to initialize an `AptosAccount` to publish the module with. You can get the private key from the Petra Wallet by going to: **Settings** > **Manage account**, show the private key, and copy that field. Since a private key is *very* sensitive data, we dont want to expose it in the code but rather hold it in an `.env` file and use it from there. - -1. Create a new `.env` file on the `root` of the project and add to the file: -```bash -REACT_APP_ACCOUNT_PK= -``` -Make sure to restart the local server so the app will load the new `.env` file. - -2. Add the following to `src/App.tsx`, where: -- `process.env.REACT_APP_ACCOUNT_PK` holds the account private key. -- `` is the `package-metadata.bcs` hex string output we get from the previous step. -- `` is the `message.mv` hex string output we get from the previous step. - -```typescript -import { Types, AptosClient, AptosAccount, HexString, TxnBuilderTypes} from "aptos"; - // ... - -function App() { - // ... 
- - // Publish the module using the TS SDK - const [publishPackageTxnHash, setPublishPackageTxnHash] = useState(null); - const [isPublishing, setIsPublishing] = useState(false); - const onPublishModule = async () => { - if (!process.env.REACT_APP_ACCOUNT_PK) return; - setIsPublishing(true); - const aptosAccount = new AptosAccount( - new HexString(process.env.REACT_APP_ACCOUNT_PK).toUint8Array() - ); - try{ - const txnHash = await client.publishPackage( - aptosAccount, - new HexString( - // package-metadata - "" - ).toUint8Array(), - [ - new TxnBuilderTypes.Module( - new HexString( - // modules - "" - ).toUint8Array() - ), - ] - ); - await client.waitForTransaction(txnHash); - setPublishPackageTxnHash(txnHash); - }catch(error: any){ - console.log("publish error", error) - }finally{ - setIsPublishing(false); - } - }; - - return ( -
- // ... -
- - {publishPackageTxnHash && ( - - )} -
-
- ); -} -``` -We wrap our publishing attempt in a `try / catch` block to catch any potential errors coming from `await client.waitForTransaction(txnHash);`. - -`waitForTransaction(txnHash)` waits for a transaction (given a transaction hash) to move past pending state and can end up in one of the 4 states: - -- processed and successfully committed to the blockchain -- rejected and is not committed to the blockchain -- committed but execution failed -- not processed within the specified timeout - -`setIsPublishing()` is an internal state to know if our app is currently publishing, if it is we want to disable the "Publish Package" button. When it is done publishing, we want to enable the "Publish Package" button. We set it to`true` when we start publishing the package and to `false` inside the `finally` block whether it succeed or not. - -`setPublishPackageTxnHash()` is an internal state for us to keep the transaction hash we just published to know if we should display the `Account modules` link - -#### Publish the package - -Click the **Publish Package** button. Once the module has been published, we should see an **Account modules** link. 
By clicking it, we should see something resembling: - -```json -{ - "address": "0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481", - "name": "message", - "friends": [], - "exposedFunctions": [ - { - "name": "get_message", - "visibility": "public", - "genericTypeParams": [], - "params": [ - "address" - ], - "_return": [ - "0x1::string::String" - ] - }, - { - "name": "set_message", - "visibility": "script", - "genericTypeParams": [], - "params": [ - "signer", - "vector" - ], - "_return": [] - } - ], - "structs": [ - { - "name": "MessageChangeEvent", - "isNative": false, - "abilities": [ - "drop", - "store" - ], - "genericTypeParams": [], - "fields": [ - { - "name": "from_message", - "type": "0x1::string::String" - }, - { - "name": "to_message", - "type": "0x1::string::String" - } - ] - }, - { - "name": "MessageHolder", - "isNative": false, - "abilities": [ - "key" - ], - "genericTypeParams": [], - "fields": [ - { - "name": "message", - "type": "0x1::string::String" - }, - { - "name": "message_change_events", - "type": "0x1::event::EventHandle<0x5af503b5c379bd69f46184304975e1ef1fa57f422dd193cdad67dc139d532481::message::MessageChangeEvent>" - } - ] - } - ] -} -``` - -Make a note of `"name": "message"`; we will use it in the next section. -
- -### Add module publishing instructions to the dapp - -As a convenience to the users, we can have the app display the `aptos move publish` command if the module does not exist. To do so, we will use the Aptos SDK to retrieve the account modules and look for one where `module.abi.name` equals `"message"` (i.e., the `"name": "message"` we saw in the Aptos Explorer). - -Update `src/App.tsx`: - -```typescript -function App() { - // ... - - // Check for the module; show publish instructions if not present. - const [modules, setModules] = React.useState([]); - React.useEffect(() => { - if (!address) return; - client.getAccountModules(address).then(setModules); - }, [address]); - - const hasModule = modules.some((m) => m.abi?.name === 'message'); - const publishInstructions = ( -
-      Run this command to publish the module:
-      
- aptos move publish --package-dir /path/to/hello_blockchain/ - --named-addresses hello_blockchain={address} -
- ); - - return ( -
- // ... - {!hasModule && publishInstructions} -
- ); -} -``` - -New users will be able to use this command to create a page for their account. - -In this step, we can also hide the **Publish Package** button when the module does exist. -Update the `button` on the `src/App.tsx` with: - -```typescript -function App() { - // ... - - return ( -
- // ... - {!hasModule && } -
- ); -} -``` - -## Step 5: Write a message on the blockchain - -Now that the module has been published, we are ready to use it to write a message on the blockchain. For this step we will use the `set_message` function exposed by the module. - -### A transaction that calls the `set_message` function - -The signature for `set_message` looks like this: - -```move -public(script) fun set_message(account: signer, message_bytes: vector) -``` - -To call this function, we need to use the `window.aptos` API provided by the wallet to submit a transaction. Specifically, we will create a `entry_function_payload` transaction that looks like this: - -```javascript -{ - type: "entry_function_payload", - function: "
::message::set_message", - arguments: ["Message to store"], - type_arguments: [] -} -``` - -There is no need to provide the `account: signer` argument. Aptos provides it automatically. - -However, we do need to specify the `message` argument: this is the `"Message to store"` in the transaction. - -### Use the `window.aptos` API to submit the `set_message` transaction - -Now that we understand how to use a transaction to call the `set_message` function, next we call this function from our app using `window.aptos.signAndSubmitTransaction()`. - -We will add: - -- A `