diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/async-mode.mdx b/HyperIndex_versioned_docs/version-v1/Advanced/async-mode.mdx deleted file mode 100644 index cc6e82a7..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/async-mode.mdx +++ /dev/null @@ -1,290 +0,0 @@ ---- -id: async-mode -title: Asynchronous handler mode -sidebar_label: Asynchronous Mode -slug: /async-mode ---- - -import Tabs from "@theme/Tabs"; -import TabItem from "@theme/TabItem"; - -`async-mode` allows you to run asynchronous actions in your event handlers that need to be completed in your indexing process. Example use cases for async mode are fetching token metadata from IPFS or fetching the token decimals for an erc20 token, but ultimately you can do anything inside an asynchronous handler. The next handler will only run once that promise has returned. - -The process of converting your syncronous handler to use `async-mode` requires 3 things: - -1. Add the `isAsync: true` to the `config.yaml` of any event handler that you want to be asynchronous. -2. Rename your `handler` function to `handlerAsync`. - In TypeScript, this is `Contract_Event_handlerAsync`. - In JavaScript and ReScript, it is `Contract.Event.handlerAsync`. - Full examples below. -3. Make the handler function a promise, and "await" any `get` or `get` calls on the context since they are promises too in async mode. - -## Implications of asynchronous mode - -Simply put, using asynchronous mode too much will slow down your indexer. If speed is not a big concern, you can use async mode more liberally. - -Regardless, if possible we recommend building the indexer with only blockchain events; and if you are using async mode, try to use it only for the most important things that you need to fetch. -If you are still in the process of developing your smart contracts we highly recommend adding more logs to your smart-contract codebase with more data so that you never have to reach for async mode to fetch onchain data. 
- -With async-mode, [`loaders`](/docs/HyperIndex/v1/event-handlers#loader-function) become optional. -If you forget to write them or add your entity to the loader, it will be loaded asyncronously from the database. -Once again, this has a performance cost. - -We ask that you use asynchronous mode with caution. -For example, if you forget to return your promise in your handler there could be unexpected behaviours since the next event might start getting processed before your async function has completed. -The other issue is that if an external promise fails it could totally block your indexer (eg. if a 3rd party API is unreachable for example). - -## Example - -As an example, we will modify the `Greeter` template (which can be generated with `envio init template -t greeter` command) to send the user who makes the greeting an address registry to get the ENS name of the user if it exists and generate an AI response message back. -The two asynchronous functions we'll use are `getEnsNameIfAvailable` and `generateAIResponse`. - -> **NOTE**: \_For simplicity we aren't doing any kind of error handling on the async functions. Ladies and gentlemen, handle your errors or the resulting frustration is on you. - -#### **config.yaml**: - -```diff -name: Greeter -description: Greeter indexer -contracts: - - name: Greeter - handler: ./src/EventHandlers.js - events: - - event: "NewGreeting(address user,string user)" -+ isAsync: true -``` - -#### **schema.graphql** (entity schema): - -```diff -type User { - id: ID! - greetings: [String!]! - latestGreeting: String! - numberOfGreetings: Int! -+ aiResponse: String! 
-} -``` - -#### **handlers**: - - - - -```javascript -GreeterContract.NewGreeting.handlerAsync(async ({ event, context }) => { - const userId = event.params.user; - const latestGreeting = event.params.greeting; - - const usersEnsOrAddress = await getEnsNameIfAvailable(userId); - const aiResponse = await generateAIResponse( - usersEnsOrAddress, - latestGreeting - ); - - const currentUserEntity = await context.User.get(userId); - - const userEntity: UserEntity = currentUserEntity - ? { - id: userId, - latestGreeting, - numberOfGreetings: currentUserEntity.numberOfGreetings + 1, - greetings: [...currentUserEntity.greetings, latestGreeting], - aiResponse, - } - : { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], - aiResponse, - }; - - context.User.set(userEntity); -}); -``` - -And here is a diff to highlight the change: - -```diff -- GreeterContract_NewGreeting_handler(({ event, context }) => { -+ GreeterContract_NewGreeting_handlerAsync(async ({ event, context }) => { - const userId = event.params.user; - const latestGreeting = event.params.greeting; - -+ const usersEnsOrAddress = await getEnsNameIfAvailable(userId) -+ const aiResponse = await generateAIResponse(usersEnsOrAddress, latestGreeting) - -- const currentUserEntity = context.User.get(userId); -+ const currentUserEntity = await context.User.get(userId); - - const userEntity: UserEntity = currentUserEntity - ? 
- { - id: userId, - latestGreeting, - numberOfGreetings: currentUserEntity.numberOfGreetings + 1, - greetings: [...currentUserEntity.greetings, latestGreeting], -+ aiResponse, - } - : - { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], -+ aiResponse, - }; - - context.User.set(userEntity); - }); -``` - - - - -```typescript -GreeterContract_NewGreeting_handlerAsync(async ({ event, context }) => { - const userId = event.params.user; - const latestGreeting = event.params.greeting; - - const usersEnsOrAddress = await getEnsNameIfAvailable(userId); - const aiResponse = await generateAIResponse( - usersEnsOrAddress, - latestGreeting - ); - - const currentUserEntity = await context.User.get(userId); - - const userEntity: UserEntity = currentUserEntity - ? { - id: userId, - latestGreeting, - numberOfGreetings: currentUserEntity.numberOfGreetings + 1, - greetings: [...currentUserEntity.greetings, latestGreeting], - aiResponse, - } - : { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], - aiResponse, - }; - - context.User.set(userEntity); -}); -``` - -And here is a diff to highlight the change: - -```diff -- GreeterContract_NewGreeting_handler(({ event, context }) => { -+ GreeterContract_NewGreeting_handlerAsync(async ({ event, context }) => { - const userId = event.params.user; - const latestGreeting = event.params.greeting; - -+ const usersEnsOrAddress = await getEnsNameIfAvailable(userId) -+ const aiResponse = await generateAIResponse(usersEnsOrAddress, latestGreeting) - -- const currentUserEntity = context.User.get(userId); -+ const currentUserEntity = await context.User.get(userId); - - const userEntity: UserEntity = currentUserEntity - ? 
- { - id: userId, - latestGreeting, - numberOfGreetings: currentUserEntity.numberOfGreetings + 1, - greetings: [...currentUserEntity.greetings, latestGreeting], -+ aiResponse, - } - : - { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], -+ aiResponse, - }; - - context.User.set(userEntity); - }); -``` - - - - -```rescript -Handlers.GreeterContract.NewGreeting.handlerAsync(async ({ event, context }) => { - let userId = event.params.user->Ethers.ethAddressToString - let latestGreeting = event.params.greeting - - let usersEnsOrAddress = await getEnsNameIfAvailable(userId) - let aiResponse = await generateAIResponse(usersEnsOrAddress, latestGreeting) - - let maybecurrentUserEntity = await context.User.get(userId) - - let userEntity: userEntity = switch maybecurrentUserEntity { - | Some(existingUserEntity) => { - id: userId, - latestGreeting, - numberOfGreetings: existingUserEntity.numberOfGreetings + 1, - greetings: existingUserEntity.greetings->Belt.Array.concat([latestGreeting]), - aiResponse, - } - | None => { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], - aiResponse, - } - } - - context.User.set(userEntity) -}) -``` - -And here is a diff to highlight the change: - -```diff -- Handlers.GreeterContract.NewGreeting.handler(({ event, context }) => { -+ Handlers.GreeterContract.NewGreeting.handlerAsync(async ({ event, context }) => { - let userId = event.params.user->Ethers.ethAddressToString - let latestGreeting = event.params.greeting - -+ let usersEnsOrAddress = await getEnsNameIfAvailable(userId) -+ let aiResponse = await generateAIResponse(usersEnsOrAddress, latestGreeting) - -- let maybecurrentUserEntity = context.User.get(userId) -+ let maybecurrentUserEntity = await context.User.get(userId) - - let userEntity: userEntity = switch maybecurrentUserEntity { - | Some(existingUserEntity) => { - id: userId, - latestGreeting, - numberOfGreetings: existingUserEntity.numberOfGreetings + 1, - 
greetings: existingUserEntity.greetings->Belt.Array.concat([latestGreeting]), - aiResponse, - } - | None => { - id: userId, - latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], - aiResponse, - } - } - - context.User.set(userEntity) - }) -``` - - - - -## Testing - -Testing async functions can be tricky if you have 3rd party dependencies. We would recommend mocking those with your mocking library of choice. -In the [testing framework](/docs/HyperIndex/v1/testing) itself, the only change is that `processEvent` is now `processEventAsync`, and it returns a promise rather than being synchronous. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/dynamic-contracts.md b/HyperIndex_versioned_docs/version-v1/Advanced/dynamic-contracts.md deleted file mode 100644 index 779e06b0..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/dynamic-contracts.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -id: dynamic-contracts -title: Loading Dynamic Contracts -sidebar_label: Dynamic Contracts / Factories -slug: /dynamic-contracts ---- - -# Dynamic Contracts / Factories - -If you have a system that does not know all the contracts that need indexing at the beginning i.e. you have a factory contract that dynamically creates new contracts over time, - you can use dynamic contracts. - - - -## Loader Function - -Contract factories are currently supported in the `loader` function of the event that you want to register the contract in. - -You can register a dynamic contract by including the following line inside the loader function: - -```javascript -context.contractRegistration.add() -``` - -> The syntax is exactly same for JavaScript, TypeScript and ReScript. - -## Example using a NFT factory - -In the NFT factory example, we want to dynamically register all the `SimpleNft` contracts that get created by the `NftFactory` contract, via `SimpleNftCreated` events. 
- -Both types of contracts will be defined in the configuration file, however address field will be omitted for the `SimpleNft` contract - address values will instead be retrieved from `SimpleNftCreated` event. - -### Config file - -```yaml -name: nftindexer -description: NFT Factory -networks: - - id: 1337 - start_block: 0 - contracts: - - name: NftFactory - abi_file_path: abis/NftFactory.json - address: 0x4675a6B115329294e0518A2B7cC12B70987895C4 - handler: src/EventHandlers.ts - events: - - event: SimpleNftCreated (string name, string symbol, uint256 maxSupply, address contractAddress) - - name: SimpleNft - abi_file_path: abis/SimpleNft.json - handler: src/EventHandlers.ts - events: - - event: Transfer (address from, address to, uint256 tokenId) -``` - -### Registering `SimpleNft` contracts in loader function for `SimpleNftCreated` event - -```javascript -context.contractRegistration.addSimpleNft(event.params.contractAddress); -``` - -> The syntax is exactly same for JavaScript, TypeScript and ReScript. - -For more information on how to write the event handlers file, go [here](../Guides/event-handlers.mdx). - -## Important Note - -When a dynamic contract is loaded, we load all the events in the block in which the contract was registered (even if they were from a previous transaction). Please let us know if this is an issue for you, as the team also has a solution where it only loads events after the event that loaded the contract. We decided this was better since many contracts emit an event upon creation, and this occurs before the contract is loaded (for example, in Uniswap v2). 
diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/generated-files.md b/HyperIndex_versioned_docs/version-v1/Advanced/generated-files.md deleted file mode 100644 index a2be9937..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/generated-files.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -id: generated-files -title: Generated Indexing Files -sidebar_label: Generated Indexing Files -slug: /generated-files ---- - -# Generated files - -The `/generated` directory contains essential files required for performing the indexing process in Envio. These files are automatically generated using the `envio codegen` CLI command and they should NOT be modified by the end user. - -If indexing errors occur, they are likely a result of issues in the generated files, which may point to an incorrect specification in the setup files (`config.yaml`, `schema.graphql` and `EventHandlers.*`) - -Once all setup file errors have been resolved, you can rerun the `envio codegen` command to re-generate the necessary indexing files. - -The variables used in the generated files adhere to the names specified in the configuration and schema files during the initial setup. This ensures consistency between the generated files and the contracts, events, and entities defined in the configuration and schema. - -The generated files are initially created in ReScript and then compiled to JavaScript (`.bs.js` extension) for runtime execution. - - ---- \ No newline at end of file diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/hypersync.md b/HyperIndex_versioned_docs/version-v1/Advanced/hypersync.md deleted file mode 100644 index 94c583d9..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/hypersync.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -id: hypersync -title: HyperSync as Data Source -sidebar_label: HyperSync Data Source -slug: /hypersync ---- - -> Beam me up, scotty! 
πŸ–– - -Envio HyperSync is our blazing-fast indexed layer on top of the blockchain that allows for hyper speed syncing. - -What would usually take hours to sync ~100,000 events can now be done in the order of less than a minute. - -HyperSync is the default method used by HyperIndex for all syncing. Visit [here](/docs/HyperSync/overview) to learn more about using the HyperSync python/ts/rust clients for further more custom needs of extracting data. - -Since this service is a layer above the blockchain we maintain and host this service for each supported network. - -You can find our list of supported networks [here](/docs/HyperSync/hypersync-supported-networks). - -## Greeter example - -```yaml -name: Greeter -description: Greeter indexer -networks: - - id: 137 # Polygon - start_block: 0 - contracts: - - name: PolygonGreeter - abi_file_path: abis/greeter-abi.json - address: 0x9D02A17dE4E68545d3a58D3a20BbBE0399E05c9c - handler: ./src/EventHandlers.bs.js - events: - - event: NewGreeting - - event: ClearGreeting -``` - -In the example above, absence of `rpc_config` will automatically direct Envio to HyperSync for the defined network (Polygon). - -For HyperSync users can use `start_block` of 0 regardless of when the deployments for the contracts to be indexed were, as HyperSync can very quickly detect the first block where it needs to start indexing from automatically. - ---- diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/labels.mdx b/HyperIndex_versioned_docs/version-v1/Advanced/labels.mdx deleted file mode 100644 index d380352e..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/labels.mdx +++ /dev/null @@ -1,109 +0,0 @@ ---- -id: labels -title: Loading Entities with Labels -sidebar_label: Entity labels -slug: /labels ---- - -import Tabs from "@theme/Tabs"; -import TabItem from "@theme/TabItem"; - -:::caution Deprecation Warning -This feature is planned for deprecation in a future release. 
We recommend not relying on it to facilitate easier upgrades in the future. -::: - -## Introduction - -In the basic section to get values loaded into your handler into you have used the `get` function. This is great when starting out, but when your indexer is further along you may want to use labels to make the connection between the `load` and `get` parts explicit. This can make your code look cleaner, and avoid sneaky bugs where the wrong entity is being loaded or the `load` and `get` function aren't matching up. - -For loading a single entity you will use a label, but for an array you can use the tag `arrayLabels` in `config.yaml` file. This is useful for groups of entitites that you want to have in your handler, and the entities stay ordered for easy iteration. For example, you may want to load two token entities for an event from an AMM pool that you are indexing. - -## Example - -The below example illustrates how to define an `arrayLabel` as well as a `label` for entity `A` that we want to load for `TestEvent` event. - -### `config.yaml` - -```yaml -networks: - - id: 12345 #Your chain Id - contracts: - - name: Greeter - # ... other fields - events: - - event: "TestEvent" - requiredEntities: - - name: "A" - arrayLabels: - - "allAs" - labels: - - "singleA" - # ... rest of your config file -``` - -### `schema.graphql` - -```graphql -type A { - id: ID! - someField: String! -} -``` - -### Event Handler file - - - - -```javascript -GreeterContract.TestEvent.loader(({ event, context }) => { - context.A.allALoad(["id1", "id2", "id3"]) - context.A.singleALoad("singled") - // ... other loaders -}) -GreeterContract.TestEvent.handler(({ event, context }) => { - let arrayOfAs = context.A.allA - let singleA = context.A.singleA - - // ... rest of the handler that uses or updates these entities. -}) -``` - - - - -```typescript -GreeterContract_TestEvent_loader(({ event, context }) => { - context.A.allALoad(["id1", "id2", "id3"]); - context.A.singleALoad("singled"); - // ... 
other loaders -}); -GreeterContract_TestEvent_handler(({ event, context }) => { - let arrayOfAs = context.A.allA; - let singleA = context.A.singleA; - - // ... rest of the handler that uses or updates these entities. -}); -``` - - - - -```rescript -GreeterContract.TestEvent.loader((event, context) => { - context.a.allALoad(["id1", "id2", "id3"]) - context.a.singleALoad("singled") - // ... other loaders -}) -GreeterContract.TestEvent.handler((event, context) => { - let arrayOfAs = context.a.allA - let singleA = context.a.singleA - - // ... rest of the handler that uses or updates these entities. -}) -``` - - - - ---- diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/linked-entity-loaders.mdx b/HyperIndex_versioned_docs/version-v1/Advanced/linked-entity-loaders.mdx deleted file mode 100644 index e05fbf0b..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/linked-entity-loaders.mdx +++ /dev/null @@ -1,126 +0,0 @@ ---- -id: linked-entity-loaders -title: Loading Linked Entities -sidebar_label: Loading Linked Entities -slug: /linked-entity-loaders ---- - -import Tabs from "@theme/Tabs"; -import TabItem from "@theme/TabItem"; - -## Introduction - -> _TLDR: When you have a schema that has entities that link/reference other entities (linked entities), you can use the `loaders` argument to load those linked entities from your handler._ - -Sometimes the entities that you want loaded in your handler are nested one or more layers deep on the entity that you have the ID for in the loader. - -Another prominant use case for linked entities: You may have a global entity (an entity that has a hardcoded ID) that references other entities. Without linked entity loaders, you wouldn't be able to load and access those referenced entities, so these types of loaders are extremely important for many types of advanced indexers. - -## Example - -The below schema shows 4 user-defined entities that reference/link to each other. 
What if the event only contains the ID of entity A, but we care about the important information entity `ImportantEntityICareAbout` that is linked via `A->B->C->ImportantEntityICareAbout`? - -### `schema.graphql` - -``` -type A { - id: ID! - b: B! - c: [C!]! @derivedFrom(field: "a") - optionalBigInt: BigInt -} -type B { - id: ID! - c: C! -} -type C { - id: ID! - a: A! - importantData: ImportantEntityICareAbout! -} -type ImportantEntityICareAbout { - id: ID! - otherData: String! -} -``` - -### `config.yaml` - -For illustration there is a `TestEvent`: - -```yaml -networks: - - id: 123456 - contracts: - - name: Gravatar - events: - - event: "TestEvent" -``` - -### Event Handler file - - - - -```javascript -GravatarContract.TestEvent.loader(({ event, context }) => { - context.A.load(event.params.id, { - loadB: { loadC: { loadImportantData: true }, loadA: {} }, - }); - // ... other loaders -}); - -GravatarContract.TestEvent.handler(({ event, context }) => { - const entityA = context.A.get(event.params.id); - const linkedB = context.A.getB(entityA); - const linkedC = context.B.getC(linkedB); - const importantDataEntity = context.C.getImportantData(linkedC); - // ... rest of the handler that uses or updates these entities. -}); -``` - - - - -```typescript -GravatarContract_TestEvent_loader(({ event, context }) => { - context.A.load(event.params.id, { - loadB: { loadC: { loadImportantData: true }, loadA: {} }, - }); - // ... other loaders -}); -GravatarContract_TestEvent_handler(({ event, context }) => { - const entityA = context.A.get(event.params.id); - const linkedB = context.A.getB(entityA); - const linkedC = context.B.getC(linkedB); - const importantDataEntity = context.C.getImportantData(linkedC); - // ... rest of the handler that uses or updates these entities. 
-}); -``` - - - - -```rescript -GravatarContract.TestEvent.loader((event, context) => { - context.a.exampleALoad( - event.params.id, - ~loaders: {loadB: {loadC: {loadImportantData: true}, loadA: {}}}, - ) - // ... other loaders -}) -GravatarContract.TestEvent.handler((event, context) => { - let entityA = context.A.get(event.params.id) - let linkedB = context.A.getB(entityA) - let linkedC = context.B.getC(linkedB) - let importantDataEntity = context.C.getImportantData(linkedC) - // ... rest of the handler that uses or updates these entities. -}) -``` - - - - -If the entity you want to load has linked entities of its own, then you need to put a `{}` (empty object next to the name to indicate stopping the recursive loading at that point), otherwise you must put a `true`. Thus in the above example, we put `true` next to the `loadImportantData`, and a `{}` next to the inner `loadA`. - -**Note:** Currently loading data from derived fields is unsupported. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/multichain-indexing.mdx b/HyperIndex_versioned_docs/version-v1/Advanced/multichain-indexing.mdx deleted file mode 100644 index 2f9781bc..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/multichain-indexing.mdx +++ /dev/null @@ -1,265 +0,0 @@ ---- -id: multichain-indexing -title: Multichain Indexing -sidebar_label: Multichain Indexing -slug: /multichain-indexing ---- - -import Tabs from "@theme/Tabs"; -import TabItem from "@theme/TabItem"; - -This page explains how to index from multiple chains in a single indexer. - -This means that events from contracts deployed on multiple chains can be used to create and update entities defined in the schema file. - -Users are required to populate the `network` section in `config.yaml` file for each chain and specify a contract to index from. - -Users can then specify event loader/handler for each of the contracts specified in `config.yaml` file. 
- -## Multichain Indexing on Greeter template - -### Config file - -```yaml -name: Greeter -description: Greeter indexer -#Global contract definitions that must contain all definitions except -#addresses. Now you can share a single handler/abi/event definitions -#for contracts across multiple chains -contracts: - - name: Greeter - abi_file_path: ./abis/greeter-abi.json - handler: ./src/EventHandlers.js - events: - - event: NewGreeting - - event: ClearGreeting -networks: - - id: 137 # Polygon - start_block: 45336336 - contracts: - - name: Greeter #A reference to the global contract definition - address: "0x9D02A17dE4E68545d3a58D3a20BbBE0399E05c9c" - - id: 59144 # Linea - start_block: 367801 - contracts: - - name: Greeter #A reference to the global contract definition - address: "0xdEe21B97AB77a16B4b236F952e586cf8408CF32A" -``` - -The Greeter indexer listens to `NewGreeting` and `ClearGreeting` events from `Greeter` contract (which is defined above networks as a "global contract") to update the `Greeting` entity. - -Notice how the global definition of the "Greeter" contract does not contain any addresses. And in the contracts section of both Polygon and Linea networks, they simply reference the name of the contract ("Greeter") and define the address. Both of these will use the same handler functions and events. - -### Schema file - -```graphql -type User { - id: ID! # user's account address - greetings: [String!]! # list of greetings made by the user - latestGreeting: String! # most recent greeting - numberOfGreetings: Int! # total number of greetings made -} -``` - -> Dev note: πŸ“’ When it makes sense, we recommend appending the chain id to the entity id when you are developing multichain indexers. For example if you deploy a contract to two different networks with the same contract address then append `-` to the end of the id to differentiate the contracts between different networks. 
- -### Event Handler file - - - - -```javascript -let { GreeterContract } = require("../generated/src/Handlers.bs.js"); - -GreeterContract.NewGreeting.loader((event, context) => { - context.User.load(event.params.user); -}); - -GreeterContract.NewGreeting.handler((event, context) => { - let user = event.params.user; - let latestGreeting = event.params.greeting; - let numberOfGreetings = event.params.numberOfGreetings; - - let existingUser = context.User.get(event.params.user); - - if (existingUser !== undefined) { - context.User.set({ - id: user, - latestGreeting: latestGreeting, - numberOfGreetings: existingUser.numberOfGreetings + 1, - greetings: [...existingUser.greetings, latestGreeting], - }); - } else { - context.User.set({ - id: user, - latestGreeting: latestGreeting, - numberOfGreetings: 1, - greetings: [latestGreeting], - }); - } -}); - -GreeterContract.ClearGreeting.loader((event, context) => { - context.User.load(event.params.user); -}); - -GreeterContract.ClearGreeting.handler((event, context) => { - let existingUser = context.User.get(event.params.user); - if (existingUser !== undefined) { - context.User.set({ - id: user, - latestGreeting: "", - numberOfGreetings: existingUser.numberOfGreetings + 1, - greetings: existingUser.greetings, - }); - } -}); -``` - - - - -```typescript -import { - GreeterContract_NewGreeting_loader, - GreeterContract_NewGreeting_handler, - GreeterContract_ClearGreeting_loader, - GreeterContract_ClearGreeting_handler, -} from "../generated/src/Handlers.gen"; - -import { UserEntity } from "../generated/src/Types.gen"; - -GreeterContract_NewGreeting_loader(({ event, context }) => { - context.User.load(event.params.user.toString()); -}); - -GreeterContract_NewGreeting_handler(({ event, context }) => { - let currentUser = context.User.get(event.params.user); - - if (currentUser !== undefined) { - let userObject: UserEntity = { - id: event.params.user.toString(), - latestGreeting: event.params.greeting, - numberOfGreetings: 
currentUser.numberOfGreetings + 1, - greetings: [...currentUser.greetings, event.params.greeting], - }; - - context.User.set(userObject); - } else { - let userObject: UserEntity = { - id: event.params.user.toString(), - latestGreeting: event.params.greeting, - numberOfGreetings: 1, - greetings: [event.params.greeting], - }; - context.User.set(userObject); - } -}); - -GreeterContract_ClearGreeting_loader(({ event, context }) => { - context.User.load(event.params.user.toString()); -}); - -GreeterContract_ClearGreeting_handler(({ event, context }) => { - let currentUser = context.User.get(event.params.user); - - if (currentUser !== undefined) { - let userObject: UserEntity = { - id: event.params.user.toString(), - latestGreeting: "", - numberOfGreetings: currentUser.numberOfGreetings, - greetings: currentUser.greetings, - }; - - context.User.set(userObject); - } -}); -``` - - - - -```rescript -open Types - -Handlers.GreeterContract.NewGreeting.loader(({ event, context }) => { - context.greeting.load(event.params.user->Ethers.ethAddressToString) -}) - -Handlers.GreeterContract.NewGreeting.handler(({ event, context }) => { - let currentUserOpt = context.greeting.get(event.params.user->Ethers.ethAddressToString) - - switch currentUserOpt { - | Some(existingUser) => { - let userObject: userEntity = { - id: event.params.user->Ethers.ethAddressToString, - latestGreeting: event.params.greeting, - numberOfGreetings: existingUser.numberOfGreetings + 1, - greetings: existingUser.greetings->Belt.Array.concat([event.params.greeting]), - } - - context.greeting.set(userObject) - } - - | None => - let userObject: userEntity = { - id: event.params.user->Ethers.ethAddressToString, - latestGreeting: event.params.greeting, - numberOfGreetings: 1, - greetings: [event.params.greeting], - } - - context.greeting.set(userObject) - } -}) - -Handlers.GreeterContract.ClearGreeting.loader(({ event, context }) => { - context.greeting.load(event.params.user->Ethers.ethAddressToString) - () -}) - 
-Handlers.GreeterContract.ClearGreeting.handler(({ event, context }) => { - let currentUserOpt = context.greeting.get(event.params.user->Ethers.ethAddressToString) - - switch currentUserOpt { - | Some(existingUser) => { - let userObject: userEntity = { - id: event.params.user->Ethers.ethAddressToString, - latestGreeting: "", - numberOfGreetings: existingUser.numberOfGreetings, - greetings: existingUser.greetings, - } - - context.greeting.set(userObject) - } - - | None => () - } -}) -``` - - - - -## `Unordered Multichain Mode` - -To activate "Unordered Multichain Mode", add a field to your config.yaml file like this: - -```yaml -unordered_multichain_mode: true -networks: ... -``` - -Or you can set it via environment variable (this would take precedence over config): - -```sh -UNORDERED_MULTICHAIN_MODE=true -``` - -By default the indexer will synchronise the ordering of events across chains, ensuring that indexing is always deterministic and events across networks will be processed in the same order on every indexer run. This deterministic synchronisation is important for some applications, but for others it is not. This is why we have added "unordered multichain mode". - -In order for events to be perfectly ordered across multiple networks, the indexer needs to wait for all blocks to increment from each network. This is because the indexer needs to determine which block came first. Hence there is a latency between when an event is emitted and when it is processed by the indexer based on the block interval of the slowest network. - -Generally, if operations to update an entity from multiple events across multiple networks are [commutative](./terminology#commutative-property) (where the ordering of operations doesnt effect the result) then the ordering of events doesn't need to be ordered and the unordered multichain mode can be set to true. 
Likewise if there is no overlap between entities from different networks then the unordered multichain mode can be set to true as the ordering of events across the networks is not important. - -If unordered multichain mode is set to true, then the indexer will process events as soon as they are emitted, and so the indexer will not wait for all blocks to increment from each network before moving on to the next block. Even though events will remain ordered for a given network, events from different networks can be processed out of order, but it also means that events will be processed as soon as they are emitted. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/performance/database-performance-optimization.md b/HyperIndex_versioned_docs/version-v1/Advanced/performance/database-performance-optimization.md deleted file mode 100644 index 7e529c2a..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/performance/database-performance-optimization.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -id: database-performance-optimization -title: Database Performance -sidebar_label: Database Performance -slug: /database-performance-optimization ---- - -## Database Performance Optimization - -### Creating Custom Indices - -Indices are essential for optimizing database performance, especially when dealing with large datasets. By creating indices, you can significantly speed up query times. Here's how to define custom indices in your schema. - -#### Single Column Indices - -Define an entity and use the `@index` directive on fields you wish to add an index to. - -```graphql -type MyEntity { - id: ID! - userAddress: String! @index - tokenAddress: String! @index -} -``` - -The fields marked with `@index` will create indices in your database, making queries on these fields much faster. - -#### Composite Indices - -You can also group fields into one composite index: - -```graphql -type MyEntity @index(fields: ["userAddress", "tokenAddress"]) { - id: ID! - userAddress: String! 
- tokenAddress: String! -} -``` - -This will create a composite index on both of these fields, which is particularly useful for queries that filter on both `userAddress` and `tokenAddress`. - -#### Automatic Indices - -Please note that all `id` fields and `@derivedFrom` fields already have indices automatically created, so there is no need to add a custom index on them. - -#### Example - -```graphql -type Token - @index(fields: ["id", "tokenId"]) - @index(fields: ["tokenId", "collection"]) { - id: ID! - tokenId: BigInt! @index - collection: NftCollection! @index - owner: User! -} -``` - -The above example shows how to create single field indices (`@index`) or multi-field indices (`@index(fields: ["id", "tokenId"])`). - -### When and Why to Use Indices - -#### Single Column Indices - -- **When to use**: Use single column indices when you frequently query a table based on a single field. -- **Why to use**: Single column indices improve the performance of queries filtering or sorting on a single column. - -#### Composite Indices - -- **When to use**: Use composite indices when you frequently query a table based on multiple fields. -- **Why to use**: Composite indices improve the performance of queries that filter or sort on multiple columns simultaneously. They are especially useful when the combined columns are frequently used together in queries. - -### Impact on Write Times and Storage - -- **Write Times**: Creating indices has a minor impact on write times. Each write operation needs to update the index, but this impact is generally negligible. -- **Storage**: Indices use additional storage. The storage required depends on the number and type of indices created. - -### Optimizing Schema and Queries - -#### Structuring Schema - -- Ensure fields frequently used in queries are indexed. -- Use composite indices for queries filtering on multiple fields. -- Avoid unnecessary indices on fields rarely used in queries. 
- -#### Optimizing Hasura GraphQL Queries - -- **Retrieve Changed Entities**: When polling for updates, retrieve only changed entities to minimize data transfer and processing. -- **Use Timestamps**: Utilize timestamps to fetch only the entities that have changed since the last query. - -#### Example Query - -```graphql -query getUpdatedTokens($lastFetched: timestamptz!) { - Token(where: { updatedAt: { _gt: $lastFetched } }) { - id - tokenId - collection - owner - } -} -``` - -In this example, `updatedAt` is a timestamp field used to fetch tokens updated since the last fetch. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/performance/historical-sync.md b/HyperIndex_versioned_docs/version-v1/Advanced/performance/historical-sync.md deleted file mode 100644 index 1e4ac41b..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/performance/historical-sync.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -id: historical-sync -title: Historical Sync -sidebar_label: Historical Sync -slug: /historical-sync ---- - -# Historical Sync - -Historical sync is an important aspect of maintaining and populating your database. Here's an overview of how historical sync works with HyperIndex: - -## Optimized HyperSync Backend Nodes - -- **Fast Retrieval**: Historical sync is fast when retrieving data from our optimized HyperSync backend nodes. These nodes are specifically designed to handle large amounts of historical data efficiently. - -## RPC Endpoint Limitations - -- **Slow Performance**: Unfortunately, historical sync for chains using RPC endpoints is not fast. The process is slower due to the limitations of RPC endpoints. -- **No Local Caching**: We do not have any form of local caching at the moment. This feature was disabled in a previous release due to stability concerns. - -### Loaders and Historical Sync - -- **Improved Speed**: Using loaders can improve the speed of historical sync. 
However, the effectiveness of loaders varies: - - **Entity Reuse**: Loaders are more beneficial when the same entities are frequently reused and updated. - - **Performance Impact**: For many indexes, the performance difference with loaders is not very large. Loaders are not highly important unless performance is of the utmost importance. - -### Future Improvements (v2) - -- **preHandlers**: In version 2, we are restructuring the way loaders work into pre-handlers. This change will make loading entities before the handler in batches more ergonomic using preHandler functions, further enhancing the performance and ease of use for historical sync. - -By understanding these factors, you can better optimize the performance of your historical sync processes. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/performance/index.md b/HyperIndex_versioned_docs/version-v1/Advanced/performance/index.md deleted file mode 100644 index a3301e0d..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/performance/index.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -id: index -title: Performance Optimization -sidebar_label: Performance -slug: /performance ---- - -## Quick Navigation - -- [Database Performance Optimization](./database-performance-optimization) -- [Historical Sync](./historical-sync) -- [Latency at the Head](./latency-at-head) - -There are different types of performance that you may be concerned about when indexing using HyperIndex. There is the speed of historical sync (populating the db from scratch), the latency of indexing new blocks/transactions, and the speed of querying the data once it is indexed. This page will cover some of the ways you can optimize the performance of your HyperIndex setup. 
diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/performance/latency-at-head.md b/HyperIndex_versioned_docs/version-v1/Advanced/performance/latency-at-head.md deleted file mode 100644 index 9fb3c0f0..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/performance/latency-at-head.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -id: latency-at-head -title: Latency at the Head -sidebar_label: Latency at the Head -slug: /latency-at-head ---- - -# Latency at the head - -Maintaining low latency at the head of the chain is crucial for ensuring timely data updates. Here's an overview of how we handle latency at the head with HyperSync: - -### Efficient Block Pulling from HyperSync - -- **Efficient Process**: At the head, we currently pull new blocks from HyperSync, which is a highly efficient process. This ensures that we stay up-to-date with the latest blocks with minimal delay. -- **Reliability**: Typically, this process runs smoothly without any significant issues. -- **Backups**: We have an on-going project to sync new blocks from both RPC and Hypersync to improve the robustness in the unlikely event of a failure in HyperSync. - -### Low Latency on Popular Networks - -- **Prioritized Networks**: We have put a lot of effort into maintaining extremely low latency on popular networks such as Ethereum, Optimism, and Arbitrum. Users should not experience any noticeable latency on these networks. -- **User Experience**: Our focus on these networks ensures a seamless experience for users relying on timely data updates. - -### Smaller Chains - -- **Lower Priority**: On some smaller chains, we haven't prioritized low latency to the same extent. As a result, there might be slightly higher latency on these networks. -- **Feedback**: If low latency on smaller chains is a concern for you, please let our team know in Discord. Your feedback helps us prioritize improvements. 
- -### Unordered Multi-Chain Mode - -- [**Docs**](./multichain-indexing#unordered-multichain-mode) -- **Multi-Chain Indexes**: For users with extremely multi-chain indexes, we offer an unordered multi-chain mode. -- **Continued Syncing**: In this mode, even if one chain experiences latency, the other chains will continue syncing as normal, ensuring that your data remains up-to-date across multiple networks. - -### Reorg Support - -- **Reorg Handling**: We have reorg support in place and are currently in the final phases of testing this feature. -- **Concerns**: If reorg support is a concern for you, please reach out to our team on Discord. We will have official documentation for reorgs available shortly. - -### Hosted Service - -We have ongoing projects to keep improving the sync and build times of the hosted service. Currently the indexers do run slightly slower on the hosted service than they may on a powerful laptop. If you are looking for a beefy hosting solution please contact us on Discord, and we can discuss our enterprise plans. - -By leveraging these features and providing feedback, you can help us maintain and improve the performance of your HyperIndex setup. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/persisted_files.md b/HyperIndex_versioned_docs/version-v1/Advanced/persisted_files.md deleted file mode 100644 index edba0a77..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/persisted_files.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -id: persisted-files -title: Persisted File Changes -sidebar_label: Persisted File Changes -slug: /persisted_files ---- - -# Persisted File Changes - -In Envio, users define four distinct files that dictate the behavior of the indexer: - -- Configuration file (`config.yaml`) -- Smart Contract ABI (`contractName.json`) -- Schema (`schema.graphql`) -- Event Handlers (`EventHandlers.\*`) - -Envio employs these files to automatically generate and execute the indexing logic. 
- -To enhance user experience and optimize the re-syncing of historical blocks, the indexer identifies the most efficient rerun sequence when any of the above files are modified. - -## Rerun sequences - -**1. Re-generate All Indexing Code and Re-sync from RPC Nodes:** - -Triggered only when the indexing logic needs regeneration due to changes in the configuration file or smart contract ABI. - -**2. Re-generate All Indexing Code and Re-sync from Stored Raw Events:** - -Activated when the schema is updated but neither the configuration nor smart contract ABI has changed. This allows for faster re-syncing using previously collected event data. - -**3. Re-sync from Stored Events:** - -Executed when only the event handlers are updated. Utilizes existing event data for re-syncing. - -**4. Continue Syncing from RPC Nodes:** - -Runs when there are no modifications in any of the user-defined files mentioned above. diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/rpc-sync.md b/HyperIndex_versioned_docs/version-v1/Advanced/rpc-sync.md deleted file mode 100644 index 2e9eb0b3..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/rpc-sync.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -id: rpc-sync -title: RPC as Data Source -sidebar_label: RPC Data Source -slug: /rpc-sync ---- - -For syncing of events to be indexed, users are required to use RPC endpoints to retrieve events from each different chain. - -RPC sync configuration is **optional** for the user, and if not defined, the indexer will use default values for the RPC endpoint. - -Users can configure their own sync behaviour for each RPC endpoint used for additional control. 
- -The following attributes can be defined for the sync config of each RPC endpoint: - -- `initial_block_interval`: Initial block interval which the indexer will use to make RPC requests -- `backoff_multiplicative`: After an RPC error, factor to scale back the number of blocks requested at once -- `acceleration_additive`: Without RPC errors or timeouts, how much to increase the number of blocks requested for the next batch -- `interval_ceiling`: Maximum block interval that is allowed for any request on the RPC -- `backoff_millis`: After an error, how long to wait before retrying in milliseconds -- `query_timeout_millis`: How long to wait in milliseconds before cancelling an RPC request due to timeout - -Sync configuration for each RPC endpoint should be defined in `config.yaml` file. -Below is an example of how sync configuration per RPC endpoint can be defined: - -```yaml -- id: 1 - rpc_config: - url: https://eth.com # RPC URL that will be used to subscribe to blockchain data on this network - unstable__sync_config: - initial_block_interval: 10000 # Integer - backoff_multiplicative: 0.8 # Float - acceleration_additive: 2000 # Integer - interval_ceiling: 10000 # Integer - backoff_millis: 5000 # Integer - query_timeout_millis: 20000 # Integer - start_block: 0 -``` - ---- - -## Granular RPC caching and failover - -For a more granular approach to handling RPC failovers, permanent caching, auto-batching, etc. you can use [eRPC](https://github.com/erpc/erpc) with [envio HyperRPC](https://docs.envio.dev/docs/HyperSync/overview-hyperrpc) or other RPC endpoints as upstream source. - -[eRPC](https://github.com/erpc/erpc) is a fault-tolerant EVM RPC proxy and re-org aware permanent caching solution, specifically built for read-heavy use-cases like data indexing and high-load frontend usage. - -### Quickstart Guide - -1. 
**Create your [`erpc.yaml`](https://docs.erpc.cloud/config/example) configuration file**: - - ```yaml - logLevel: debug - projects: - - id: main - upstreams: - # This will automatically add all supported EVM chains by HyperRPC. - - endpoint: evm+envio://rpc.hypersync.xyz - # you can add other rpc endpoints for fallback - - endpoint: https://eth-1.com - - endpoint: https://eth-2.com - - endpoint: https://eth-3.com - ``` - - Refer to [a complete config example](https://docs.erpc.cloud/config/example) for further details and customization options. - -2. **Use the Docker image**: - - ```bash - docker run -v $(pwd)/erpc.yaml:/root/erpc.yaml -p 4000:4000 -p 4001:4001 ghcr.io/erpc/erpc:latest - ``` - - Or run eRPC as a service and as part of your current `docker-compose.yaml` configs. - - ```yaml - services: - ... - - erpc: - image: ghcr.io/erpc/erpc:latest - platform: linux/amd64 - volumes: - - "${PWD}/erpc.yaml:/root/erpc.yaml" - ports: - - 4000:4000 - - 4001:4001 - restart: always - ``` - -3. **Set the eRPC URL in the `envio config yaml`** - -```yaml -- id: 1 - rpc_config: - url: http://erpc:4000/main/evm/1 - start_block: 0 - ... -``` - -Once configured, all RPC requests will be routed through eRPC, which will manage caching, failover, auto-batching, rate-limiting, auto-discovery of node providers, and more behind the scenes, providing a more resilient and efficient indexing solution as opposed to using a single RPC source. Using HyperSync will still perform indexing faster as opposed to RPC based solutions. 
diff --git a/HyperIndex_versioned_docs/version-v1/Advanced/terminology.md b/HyperIndex_versioned_docs/version-v1/Advanced/terminology.md deleted file mode 100644 index 46687bb5..00000000 --- a/HyperIndex_versioned_docs/version-v1/Advanced/terminology.md +++ /dev/null @@ -1,112 +0,0 @@ ---- -id: terminology -title: Terminology -sidebar_label: Terminology -slug: /terminology ---- - -### Address -A unique identifier representing an account or entity within a blockchain network. -### API -Application Programming Interface, a set of protocols and tools for building software applications. -### Block -A collection of data containing a set of transactions that are bundled together and added to the blockchain. -### Codegen -The process of automatically generating code based on a given input, often used in blockchain development for generating client libraries or interfaces. -### Commutative property -The **commutative property** is a fundamental property of certain binary operations in mathematics. An operation is said to be commutative if the order in which you apply the operation to two operands does not affect the result. In other words, for a commutative operation: - -a + b = b + a - -Here are some common examples of commutative operations: - -1. **Addition (a + b):** - 2 + 3 = 3 + 2 - -2. **Multiplication (a \times b):** - 2 * 3 = 3 * 2 - -However, not all operations are commutative. Subtraction and division are examples of non-commutative operations: - -1. **Subtraction (a - b):** - 5 - 3 != 3 - 5 - -2. **Division (a / b):** - 8 / 4 != 4 / 8 - -3. **String Concatenation:** - "Hello" + "World" != "World" + "Hello" - -It's important to note that the commutative property is a property of the operation itself, not necessarily the numbers involved. If an operation is commutative, it means you can switch the order of the operands without changing the result. - -For example, in addition, it doesn't matter whether you add 2 and 3 or 3 and 2; the result is the same. 
- -### Endpoint -A URL that can be used to query an Envio custom API. -### Envio CLI -A command line interface tool for building and deploying Envio indexers. -### Event -An event is a specific occurrence or action within a blockchain system that is specified in smart contracts, and can be used to emit data from the blockchain. Conversely, smart contracts can emit events to essentially communicate that something has happened on the blockchain. - -Web applications or any kind of application (e.g. mobile app, backend job, etc.) can listen to events and take actions when they occur. Events are typically data that are not stored on-chain as it would be considerably more expensive to store. - -Here is an example of declaring an event, and then emitting the same event once the event occurs: - -Declaring an event: -`event Deposit(address indexed _from, bytes32 indexed _id, uint _value);` - -Emitting an event: -`emit Deposit(msg.sender, _id, msg.value);` -### Event Handler -A function that listens for a specific event from a smart contract and either updates or inserts new data into your Envio API. -### EVM -Ethereum Virtual Machine (EVM), a runtime environment that executes smart contracts on the Ethereum blockchain. -### EVM compatible -The ability for a blockchain to run the EVM and execute Ethereum smart contracts. In the context of Envio, it's the ability to deploy a unified API to retrieve data from multiple EVM-compatible blockchains (e.g. Ethereum, BSC, Arbitrum, Polygon, Avalanche, Optimism, Fantom, Cronos, etc.) -### GraphQL -A query language for interacting with APIs, commonly used in blockchain systems for retrieving specific data from the blockchain platforms. As an alternative to REST, GraphQL lets developers construct requests that pull data from multiple data sources in a single API call. -### GraphQL API -The data presentation part of Envio indexer. Typically, it's a GraphQL API auto-generated from the schema file. 
-### Hosted Service -A managed service platform for building, hosting and querying Envio's Indexers with guaranteed uptime and performance service level agreements. -### Indexer -A specialized database management system (DBMS) that indexes and organises blockchain data, making it easier for developers to efficiently query, retrieve, and utilise on-chain data. Web2 apps usually rely on indexers like Google to pre-sort information into indices for data retrieval and filtering. In blockchain and in Web3, applications need indexers to achieve data retrieval in the same way. -### JavaScript -JavaScript is a high-level, interpreted programming language that is primarily used for client-side scripting in web browsers. It is the de facto language for web development and has been around for a long time. It is the language of the web, enabling developers to create interactive and dynamic web applications. -### Node -A device or computer that participates in a blockchain network, maintaining a copy of the blockchain and validating transactions. -### Ploffen -Ploffen means "Pop" in Dutch and is a fun game based on an ERC20 token contract, where users can deposit a game token (i.e. make a contribution) into a savings pool. - -The last user to add a contribution to the savings pool has a chance of winning the entire savings pool if no other user subsequently deposits a contribution within 1 hour of the previous contribution made. For example, if 30 persons play the game, and each person contributes a small amount, the last person can stand a chance of winning the *total contributions* made by all 30 persons in the savings pool. - -The Ploffen project demonstrates a Hardhat framework example. It comes with a sample contract, a test for that contract, a script that deploys that contract, as well as the Envio integration to index emitted events from the Ploffen smart contract. - -### Query -A request for data. 
In the case of Envio, a query is a request for data from an Envio API that will be answered by an Envio Indexer. -### ReScript -ReScript is a robustly typed language that compiles to efficient and human-readable JavaScript. ReScript aims to bring the power and expressiveness of functional programming to JavaScript development. It offers a seamless integration with JavaScript and provides features like static typing, pattern matching, and immutable data structures. -### Schema File -A file used to define entities based on events emitted from the smart contract and the data types for these entities. -### SDK -"Software Development Kit" is a collection of tools, libraries, and documentation that facilitates the development of applications for a specific platform or system. -### Smart Contract -Smart contracts are a type of Ethereum account. This means they have a balance and can be the target of transactions. However they're not controlled by a user, instead they are deployed to the network and run as programmed. User accounts can then interact with a smart contract by submitting transactions that execute a function defined on the smart contract. -### Tokens -A digital representation of an asset or utility within a blockchain system that follows a specific standard. The ERC-20 token for example, is a standard for creating and issuing smart contracts for fungible tokens on the EVM-compatible blockchains. -### Transaction -An action or set of actions recorded on the blockchain, typically involving the transfer of assets, execution of smart contracts, or other network interactions. -### TypeScript -TypeScript is a superset of JavaScript that adds static typing and other advanced features to the language. It compiles down to plain JavaScript, making it compatible with existing JavaScript codebases. TypeScript helps developers catch errors during development by providing type checking and improved tooling support. 
It enhances JavaScript by adding features like interfaces, classes, modules, and generics. - - - - - - - - - - - - diff --git a/HyperIndex_versioned_docs/version-v1/Examples/example-ens.md b/HyperIndex_versioned_docs/version-v1/Examples/example-ens.md deleted file mode 100644 index dd9e508e..00000000 --- a/HyperIndex_versioned_docs/version-v1/Examples/example-ens.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -id: example-ens -title: ENS Indexer -sidebar_label: ENS Indexer -slug: /example-ens ---- - -# ENS Indexer - -This [repo](https://github.com/JasoonS/Envio-ENS-Indexer) contains an Envio indexer template built using TypeScript for indexing events from the ENS (Ethereum Name Service) contracts. - -The indexer has been built using v0.0.37 of Envio. - -## Steps to run the indexer - -1. Clone the [repo](https://github.com/JasoonS/Envio-ENS-Indexer) -2. Install any other pre-requisite packages for Envio listed [here](https://docs.envio.dev/docs/installation#prerequisites) -3. Install with `pnpm i` -4. Generate indexing code via `pnpm codegen` -5. Run the indexer via `pnpm dev` (make sure you have Docker running) -6. Stop the indexer via `pnpm envio stop` diff --git a/HyperIndex_versioned_docs/version-v1/Examples/example-liquidation-metrics.md b/HyperIndex_versioned_docs/version-v1/Examples/example-liquidation-metrics.md deleted file mode 100644 index 12e9548b..00000000 --- a/HyperIndex_versioned_docs/version-v1/Examples/example-liquidation-metrics.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -id: example-liquidation-metrics -title: Compound V2 Liquidation Metrics -sidebar_label: Compound V2 Liquidation Metrics -slug: /example-liquidation-metrics ---- - -# Compound V2 Liquidation Metrics - -Note: This example is built on version 0.0.21 (current version is >= 0.0.36). - -This [repo](https://github.com/enviodev/liquidation-metrics) contains an example Envio indexer built using TypeScript for the Compound V2 forks across multiple chains. 
- -This repo was forked from the [original indexer](https://github.com/JossDuff/liquidation-metrics/) built by Joss Duff, one of Envio's first grantees. - -`LiquidateBorrow` event from the pool contracts of below Compound V2 forks are indexed, specifically storing the amount of tokens seized and liquidators. - -Addresses of all the pool contracts are stored in the `config.yaml` file. - -## Protocols indexed - -1. Compound V2 on Ethereum Mainnet -2. Flux Finance on Ethereum Mainnet -3. Iron Bank on Ethereum Mainnet -4. Strike Finance on Ethereum Mainnet -5. Iron Bank on Optimism -6. Sonne Finance on Optimism -7. Benqi Lending on Avalanche C-chain -8. Iron Bank on Avalanche C-chain -9. Venus on BSC - -The indexer has been built using v0.0.21 of Envio. - -## Steps to run the indexer - -1. Clone the [repo](https://github.com/enviodev/liquidation-metrics) -2. Install any other pre-requisite packages for Envio listed [here](https://docs.envio.dev/docs/installation#prerequisites) -3. Install Envio via `npm i -g envio@v0.0.21` -4. Generate indexing code via `envio codegen` -5. Run the indexer via `envio dev` (make sure you have Docker running) -6. Stop the indexer via `pnpm envio stop` diff --git a/HyperIndex_versioned_docs/version-v1/Examples/example-sablier-v2.md b/HyperIndex_versioned_docs/version-v1/Examples/example-sablier-v2.md deleted file mode 100644 index 0e6baca3..00000000 --- a/HyperIndex_versioned_docs/version-v1/Examples/example-sablier-v2.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -id: example-sablier-v2 -title: Sablier V2 -sidebar_label: Sablier V2 -slug: /example-sablier-v2 ---- - -# Sablier V2 indexer - -This [Sablier repo](https://github.com/sablier-labs/v2-subgraphs/tree/main/apps/protocol-envio) shows a comprehensive example of Hyperindex being used, indexing 9 different chains. - -Sablier V2 is a protocol designed for on-chain money streaming and token distribution. DAOs and businesses use Sablier for vesting, payroll, airdrops, and more. 
diff --git a/HyperIndex_versioned_docs/version-v1/Examples/example-uniswap-v3.md b/HyperIndex_versioned_docs/version-v1/Examples/example-uniswap-v3.md deleted file mode 100644 index ff6dc149..00000000 --- a/HyperIndex_versioned_docs/version-v1/Examples/example-uniswap-v3.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -id: example-uniswap-v3 -title: Uniswap V3 -sidebar_label: Uniswap V3 -slug: /example-uniswap-v3 ---- - -# Uniswap V3 - -> This is a development repo that is undergoing continual changes for benchmarking purposes. - -This [repo](https://github.com/enviodev/uniV3-swaps) contains an example Envio indexer built using TypeScript for the [Uniswap V3 USDC / ETH -0.05% pool](https://etherscan.io/address/0x88e6a0c2ddd26feeb64f039a2c41296fcb3f5640) deployed on Ethereum Mainnet. - -`Swap` events from the contract are indexed as entities and the `LiquidityPool` entity is updated on each `Swap` event to track cumulative statistics for the pool. - -The indexer has been built using v0.0.21 of Envio. - -## Steps to run the indexer - -1. Clone the [repo](https://github.com/enviodev/uniV3-swaps) -1. Install any other pre-requisite packages for Envio listed [here](https://docs.envio.dev/docs/installation#prerequisites) -1. Install Envio via `npm i -g envio@v0.0.21` -1. Generate indexing code via `envio codegen` -1. Run the indexer via `envio dev` (make sure you have Docker running) -1. Stop the indexer via `pnpm envio stop` diff --git a/HyperIndex_versioned_docs/version-v1/Guides/cli-commands.md b/HyperIndex_versioned_docs/version-v1/Guides/cli-commands.md deleted file mode 100644 index 2d0bd231..00000000 --- a/HyperIndex_versioned_docs/version-v1/Guides/cli-commands.md +++ /dev/null @@ -1,285 +0,0 @@ ---- -id: cli-commands -title: Envio CLI -sidebar_label: Envio CLI -slug: /cli-commands ---- - -# Command-Line Help for `envio` - -This document contains the help content for the `envio` command-line program. 
- -**Command Overview:** - -* [`envio`↴](#envio) -* [`envio init`↴](#envio-init) -* [`envio init template`↴](#envio-init-template) -* [`envio init contract-import`↴](#envio-init-contract-import) -* [`envio init contract-import explorer`↴](#envio-init-contract-import-explorer) -* [`envio init contract-import local`↴](#envio-init-contract-import-local) -* [`envio init fuel`↴](#envio-init-fuel) -* [`envio init fuel template`↴](#envio-init-fuel-template) -* [`envio dev`↴](#envio-dev) -* [`envio stop`↴](#envio-stop) -* [`envio codegen`↴](#envio-codegen) -* [`envio local`↴](#envio-local) -* [`envio local docker`↴](#envio-local-docker) -* [`envio local docker up`↴](#envio-local-docker-up) -* [`envio local docker down`↴](#envio-local-docker-down) -* [`envio local db-migrate`↴](#envio-local-db-migrate) -* [`envio local db-migrate up`↴](#envio-local-db-migrate-up) -* [`envio local db-migrate down`↴](#envio-local-db-migrate-down) -* [`envio local db-migrate setup`↴](#envio-local-db-migrate-setup) -* [`envio start`↴](#envio-start) - -## `envio` - -**Usage:** `envio [OPTIONS] ` - -###### **Subcommands:** - -* `init` β€” Initialize an indexer with one of the initialization options -* `dev` β€” Development commands for starting, stopping, and restarting the indexer with automatic codegen for any changed files -* `stop` β€” Stop the local environment - delete the database and stop all processes (including Docker) for the current directory -* `codegen` β€” Generate indexing code from user-defined configuration & schema files -* `local` β€” Prepare the local environment for Envio testing -* `start` β€” Start the indexer without any automatic codegen - -###### **Options:** - -* `-d`, `--directory ` β€” The directory of the project. 
Defaults to current dir ("./") -* `-o`, `--output-directory ` β€” The directory within the project that generated code should output to - - Default value: `generated` -* `--config ` β€” The file in the project containing config - - Default value: `config.yaml` - - - -## `envio init` - -Initialize an indexer with one of the initialization options - -**Usage:** `envio init [OPTIONS] [COMMAND]` - -###### **Subcommands:** - -* `template` β€” Initialize Evm indexer from an example template -* `contract-import` β€” Initialize Evm indexer by importing config from a contract for a given chain -* `fuel` β€” Initialization option for creating Fuel indexer - -###### **Options:** - -* `-n`, `--name ` β€” The name of your project -* `-l`, `--language ` β€” The language used to write handlers - - Possible values: `javascript`, `typescript`, `rescript` - - - - -## `envio init template` - -Initialize the Evm indexer from an example template - -**Usage:** `envio init template [OPTIONS]` - -###### **Options:** - -* `-t`, `--template