From 9d3e86efb587fa4f31419f248a1b73b6cc18242c Mon Sep 17 00:00:00 2001 From: arnaudberger Date: Thu, 18 Jul 2024 17:19:16 -0400 Subject: [PATCH 1/6] first commit --- cmd/substreams/codegen.go | 7 + cmd/substreams/init.go | 2 +- codegen/cmd.go | 5 + codegen/subgraph.go | 307 ++++++++++++++++++ codegen/templates/triggers/Makefile.gotmpl | 1 + codegen/templates/triggers/README.md | 91 ++++++ codegen/templates/triggers/buf.gen.yaml | 6 + .../dev-environment/config.toml.gotmpl | 22 ++ .../dev-environment/docker-compose.yml | 82 +++++ .../triggers/dev-environment/start.sh | 73 +++++ .../templates/triggers/package.json.gotmpl | 37 +++ .../templates/triggers/run-local.sh.gotmpl | 57 ++++ .../templates/triggers/schema.graphql.gotmpl | 9 + .../templates/triggers/src/mappings.ts.gotmpl | 16 + .../templates/triggers/subgraph.yaml.gotmpl | 19 ++ codegen/templates/triggers/tsconfig.json | 6 + go.mod | 3 +- go.sum | 2 + 18 files changed, 743 insertions(+), 2 deletions(-) create mode 100644 cmd/substreams/codegen.go create mode 100644 codegen/cmd.go create mode 100644 codegen/subgraph.go create mode 100644 codegen/templates/triggers/Makefile.gotmpl create mode 100644 codegen/templates/triggers/README.md create mode 100644 codegen/templates/triggers/buf.gen.yaml create mode 100644 codegen/templates/triggers/dev-environment/config.toml.gotmpl create mode 100644 codegen/templates/triggers/dev-environment/docker-compose.yml create mode 100755 codegen/templates/triggers/dev-environment/start.sh create mode 100644 codegen/templates/triggers/package.json.gotmpl create mode 100644 codegen/templates/triggers/run-local.sh.gotmpl create mode 100644 codegen/templates/triggers/schema.graphql.gotmpl create mode 100644 codegen/templates/triggers/src/mappings.ts.gotmpl create mode 100644 codegen/templates/triggers/subgraph.yaml.gotmpl create mode 100644 codegen/templates/triggers/tsconfig.json diff --git a/cmd/substreams/codegen.go b/cmd/substreams/codegen.go new file mode 100644 index 
00000000..9725d1eb --- /dev/null +++ b/cmd/substreams/codegen.go @@ -0,0 +1,7 @@ +package main + +import "github.com/streamingfast/substreams/codegen" + +func init() { + rootCmd.AddCommand(codegen.Cmd) +} diff --git a/cmd/substreams/init.go b/cmd/substreams/init.go index 301fa980..5bff9faa 100644 --- a/cmd/substreams/init.go +++ b/cmd/substreams/init.go @@ -17,7 +17,7 @@ import ( "strings" "time" - connect "connectrpc.com/connect" + "connectrpc.com/connect" "github.com/charmbracelet/glamour" "github.com/charmbracelet/huh" "github.com/charmbracelet/huh/spinner" diff --git a/codegen/cmd.go b/codegen/cmd.go new file mode 100644 index 00000000..72e62c0d --- /dev/null +++ b/codegen/cmd.go @@ -0,0 +1,5 @@ +package codegen + +import "github.com/spf13/cobra" + +var Cmd = &cobra.Command{Use: "codegen", Short: "Code generator for substreams"} diff --git a/codegen/subgraph.go b/codegen/subgraph.go new file mode 100644 index 00000000..369c9717 --- /dev/null +++ b/codegen/subgraph.go @@ -0,0 +1,307 @@ +package codegen + +import ( + "bytes" + "embed" + "fmt" + "io/fs" + "os" + "path/filepath" + "strings" + "text/template" + + "github.com/bmatcuk/doublestar/v4" + "github.com/spf13/cobra" + "github.com/streamingfast/substreams/manifest" + pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" + "google.golang.org/protobuf/types/descriptorpb" +) + +var subgraphCmd = &cobra.Command{ + Use: "subgraph ", + Short: "Generate subgraph dev environment from substreams manifest", + Args: cobra.ExactArgs(2), + RunE: generateSubgraphEnv, +} + +func init() { + subgraphCmd.Flags().String("substreams-api-token-envvar", "SUBSTREAMS_API_TOKEN", "name of variable containing Substreams Authentication token") + Cmd.AddCommand(subgraphCmd) +} + +type Project struct { + Name string + Network string + Module *pbsubstreams.Module + OutputDescriptor *descriptorpb.DescriptorProto + ProtoTypeMapping map[string]*descriptorpb.DescriptorProto +} + +func NewProject(name, network string, module 
*pbsubstreams.Module, outputDescriptor *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) *Project { + return &Project{ + Network: network, + Name: name, + Module: module, + OutputDescriptor: outputDescriptor, + ProtoTypeMapping: protoTypeMapping, + } +} + +func (p *Project) SubstreamsKebabName() string { + return strings.ReplaceAll(p.Name, "_", "-") +} + +func (p *Project) GetModuleName() string { + return p.Module.Name +} + +func (p *Project) GetEntityOutputName() string { + return p.OutputDescriptor.GetName() +} + +func GetExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[string]*descriptorpb.DescriptorProto { + var protoTypeMapping = map[string]*descriptorpb.DescriptorProto{} + for _, protoFile := range protoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + currentName := "." + packageName + "." + message.GetName() + protoTypeMapping[currentName] = message + processMessage(message, currentName, protoTypeMapping) + } + } + + return protoTypeMapping +} + +func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { + for _, nestedMessage := range message.NestedType { + currentName := "." + parentName + "." 
+ nestedMessage.GetName() + protoTypeMapping[currentName] = nestedMessage + processMessage(nestedMessage, currentName, protoTypeMapping) + } +} + +func (p *Project) GetEntities() (map[string]map[string]string, error) { + var outputMap = map[string]map[string]string{} + err := p.GetEntityFromMessage(p.OutputDescriptor, outputMap) + if err != nil { + return nil, fmt.Errorf("getting entities: %w", err) + } + + return outputMap, nil +} + +func (p *Project) GetEntityFromMessage(message *descriptorpb.DescriptorProto, inputMap map[string]map[string]string) error { + var fieldMapping = map[string]string{} + for _, field := range message.GetField() { + switch *field.Type { + case descriptorpb.FieldDescriptorProto_TYPE_BYTES: + fieldMapping[field.GetName()] = "Bytes!" + case descriptorpb.FieldDescriptorProto_TYPE_UINT64: + fieldMapping[field.GetName()] = "Int!" + case descriptorpb.FieldDescriptorProto_TYPE_INT64: + fieldMapping[field.GetName()] = "Int!" + case descriptorpb.FieldDescriptorProto_TYPE_INT32: + fieldMapping[field.GetName()] = "Int!" + case descriptorpb.FieldDescriptorProto_TYPE_UINT32: + fieldMapping[field.GetName()] = "Int!" + case descriptorpb.FieldDescriptorProto_TYPE_STRING: + fieldMapping[field.GetName()] = "String!" + case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: + sanitizeFieldName := (*field.TypeName)[strings.LastIndex(*field.TypeName, ".")+1:] + switch *field.Label { + case descriptorpb.FieldDescriptorProto_LABEL_REPEATED: + fieldMapping[field.GetName()] = "[" + sanitizeFieldName + "]!" + case descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL: + fieldMapping[field.GetName()] = sanitizeFieldName + "!" + case descriptorpb.FieldDescriptorProto_LABEL_REQUIRED: + fieldMapping[field.GetName()] = sanitizeFieldName + "!" 
+ default: + return fmt.Errorf("field label %q not supported", *field.Label) + } + nestedMessage := p.ProtoTypeMapping[*field.TypeName] + err := p.GetEntityFromMessage(nestedMessage, inputMap) + if err != nil { + return fmt.Errorf("getting entity from message: %w", err) + } + default: + return fmt.Errorf("field type %q not supported", *field.Type) + } + } + + inputMap[message.GetName()] = fieldMapping + return nil +} + +//go:embed templates/* +var templatesFS embed.FS + +func ParseFS(myFuncs template.FuncMap, fsys fs.FS, pattern string) (*template.Template, error) { + t := template.New("").Funcs(myFuncs) + filenames, err := doublestar.Glob(fsys, pattern) + if err != nil { + return nil, err + } + if len(filenames) == 0 { + return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) + } + + for _, filename := range filenames { + b, err := fs.ReadFile(fsys, filename) + if err != nil { + return nil, err + } + + name, _ := strings.CutPrefix(filename, "templates/") + + _, err = t.New(name).Parse(string(b)) + if err != nil { + return nil, err + } + } + return t, nil +} + +func (p *Project) Render() (projectFiles map[string][]byte, err error) { + projectFiles = map[string][]byte{} + + tpls, err := ParseFS(nil, templatesFS, "**/*.gotmpl") + if err != nil { + return nil, fmt.Errorf("parse templates: %w", err) + } + + templateFiles := map[string]string{ + "triggers/Makefile.gotmpl": "Makefile", + "triggers/buf.gen.yaml": "buf.gen.yaml", + "triggers/package.json.gotmpl": "package.json", + "triggers/tsconfig.json": "tsconfig.json", + "triggers/subgraph.yaml.gotmpl": "subgraph.yaml", + "triggers/schema.graphql.gotmpl": "schema.graphql", + "triggers/src/mappings.ts.gotmpl": "src/mappings.ts", + "triggers/run-local.sh.gotmpl": "run-local.sh", + } + + for templateFile, finalFileName := range templateFiles { + var content []byte + if strings.HasSuffix(templateFile, ".gotmpl") { + buffer := &bytes.Buffer{} + if err := tpls.ExecuteTemplate(buffer, templateFile, p); err 
!= nil { + return nil, fmt.Errorf("embed render entry template %q: %w", templateFile, err) + } + content = buffer.Bytes() + } else { + content, err = templatesFS.ReadFile("templates/" + templateFile) + if err != nil { + return nil, fmt.Errorf("reading %q: %w", templateFile, err) + } + } + + projectFiles[finalFileName] = content + } + + return +} + +func getModule(pkg *pbsubstreams.Package, moduleName string) (*pbsubstreams.Module, error) { + existingModules := pkg.GetModules().GetModules() + for _, module := range existingModules { + if (module.Name) == moduleName { + return module, nil + } + } + + return nil, fmt.Errorf("module %q does not exists", moduleName) +} + +// delete all partial files which are already merged into the kv store +func generateSubgraphEnv(cmd *cobra.Command, args []string) error { + //ctx := cmd.Context() + manifestPath := args[0] + moduleName := args[1] + reader, err := manifest.NewReader(manifestPath) + if err != nil { + return fmt.Errorf("manifest reader: %w", err) + } + + pkg, _, err := reader.Read() + if err != nil { + return fmt.Errorf("read manifest %q: %w", manifestPath, err) + } + + requestedModule, err := getModule(pkg, moduleName) + if err != nil { + return fmt.Errorf("getting module: %w", err) + } + + if pkg.GetPackageMeta()[0] == nil { + return fmt.Errorf("package meta not found") + } + + messageDescriptor, err := searchForMessageTypeIntoPackage(pkg, requestedModule.Output.Type) + if err != nil { + return fmt.Errorf("searching for message type: %w", err) + } + + protoTypeMapping := GetExistingProtoTypes(pkg.ProtoFiles) + + project := NewProject(pkg.GetPackageMeta()[0].Name, pkg.Network, requestedModule, messageDescriptor, protoTypeMapping) + + projectFiles, err := project.Render() + if err != nil { + return fmt.Errorf("rendering project files: %w", err) + } + + saveDir := "/tmp/testSubCmd/" + + err = os.MkdirAll(saveDir, 0755) + if err != nil { + return fmt.Errorf("creating directory %s: %w", saveDir, err) + } + + for fileName, 
fileContent := range projectFiles { + filePath := filepath.Join(saveDir, fileName) + + err := os.MkdirAll(filepath.Dir(filePath), 0755) + if err != nil { + return fmt.Errorf("creating directory %s: %w", filepath.Dir(filePath), err) + } + + err = os.WriteFile(filePath, fileContent, 0644) + if err != nil { + return fmt.Errorf("saving file %s: %w", filePath, err) + } + } + + return nil +} + +func searchForMessageTypeIntoPackage(pkg *pbsubstreams.Package, outputType string) (*descriptorpb.DescriptorProto, error) { + sanitizeMessageType := outputType[strings.Index(outputType, ":")+1:] + for _, protoFile := range pkg.ProtoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + if packageName+"."+message.GetName() == sanitizeMessageType { + return message, nil + } + + nestedMessage := checkNestedMessages(message, packageName, sanitizeMessageType) + if nestedMessage != nil { + return nestedMessage, nil + } + } + } + + return nil, fmt.Errorf("message type %q not found in package", sanitizeMessageType) +} + +func checkNestedMessages(message *descriptorpb.DescriptorProto, packageName, messageType string) *descriptorpb.DescriptorProto { + for _, nestedMessage := range message.NestedType { + if packageName+"."+message.GetName()+"."+nestedMessage.GetName() == messageType { + return nestedMessage + } + + checkNestedMessages(nestedMessage, packageName, messageType) + } + + return nil +} diff --git a/codegen/templates/triggers/Makefile.gotmpl b/codegen/templates/triggers/Makefile.gotmpl new file mode 100644 index 00000000..96efb8bb --- /dev/null +++ b/codegen/templates/triggers/Makefile.gotmpl @@ -0,0 +1 @@ +//Not implemented yet \ No newline at end of file diff --git a/codegen/templates/triggers/README.md b/codegen/templates/triggers/README.md new file mode 100644 index 00000000..ce2e0ebc --- /dev/null +++ b/codegen/templates/triggers/README.md @@ -0,0 +1,91 @@ +# Description + +- This is a generated Substreams-powered-Subgraph + +# 
Dependencies + +## Get Substreams CLI (optional) + +To try the Substreams directly, you need to install the `substreams CLI` (v1.7.2 or above). + +You have many options as explained in this [installation guide](https://substreams.streamingfast.io/documentation/consume/installing-the-cli). + +To check if `substreams` was installed successfully, you can run the following command: + +```bash +substreams --version +> substreams version ... +``` + +## Get Substreams API Token + +To try the Substreams directly or to run a local graph-node instance, you will need to get a Substreams API token. +Follow the instructions on the [authentication section](https://substreams.streamingfast.io/documentation/consume/authentication) in the `StreamingFast` documentation. + +## Install Docker + +To run a local `graph-node` instance, you will need to install Docker. You can do it by following the instructions on the [official Docker website](https://docs.docker.com/get-docker/). + +## Install buf cli + +To run the proto assembly script bindings, you will need to install the `buf` [cli](https://buf.build/docs/installation). + +## Run the entire stack with the `run-local.sh` script + +You can run the entire stack (`docker`, `npm` installations and `graph` creation with deployment) by running the script below + +```bash +./run-local.sh +``` + +However, if you want to run each command individually, follow the instructions below: + +## Install npm and nodeJS packages + +Run the following command in the `root` of the repository: + +```bash +npm install +``` + +## Generate proto assembly script bindings + +```bash +npm run generate +``` + +### Generate subgraph mapping types + +```bash +npm run codegen +``` + +# Deploy a subgraph + +## On a local dev environment + +### Launch docker-compose environment + +To deploy your subgraph locally, you need to run a local graph-node instance.
To do so, export your `SUBSTREAMS_API_TOKEN` and +use the `launch-graph-node` script: + +```bash +docker compose -f dev-environment/docker-compose.yml up -d --wait +``` + +This script runs `docker compose` to create all the instances needed to launch the node properly on your machine, connecting to the StreamingFast Substreams API. + +### Deploy locally + +Then, from another terminal: + +```bash +export SUBSTREAMS_API_TOKEN="YOUR_TOKEN" +npm run create-local +npm run deploy-local +npm run remove-local +``` + +### Query a subgraph + +Once your subgraph is deployed, you can query it! To do so, you can directly write your query locally on http://localhost:8000/subgraphs/name/{name_of_your_subgraph}/ diff --git a/codegen/templates/triggers/buf.gen.yaml b/codegen/templates/triggers/buf.gen.yaml new file mode 100644 index 00000000..d6e2175e --- /dev/null +++ b/codegen/templates/triggers/buf.gen.yaml @@ -0,0 +1,6 @@ +version: v1 +plugins: + - name: as-generator + path: ./node_modules/.bin/as-proto-gen + strategy: all + out: src/pb diff --git a/codegen/templates/triggers/dev-environment/config.toml.gotmpl b/codegen/templates/triggers/dev-environment/config.toml.gotmpl new file mode 100644 index 00000000..f23f1f7d --- /dev/null +++ b/codegen/templates/triggers/dev-environment/config.toml.gotmpl @@ -0,0 +1,22 @@ +[general] + +[store] +[store.primary] +connection = "postgresql://graph-node:let-me-in@postgres:5432/graph-node" +weight = 1 +pool_size = 10 + +[chains] +ingestor = "block_ingestor_node" + +[chains.{{ .ChainName }}] +protocol = "substreams" +shard = "primary" +provider = [ + { label = "substreams", details = { type = "substreams", url = "https://{{ $.ChainEndpoint }}", token = "$SUBSTREAMS_API_TOKEN", conn_pool_size = 1 } }, +] + +[deployment] +[[deployment.rule]] +shard = "primary" +indexers = ["default"] diff --git a/codegen/templates/triggers/dev-environment/docker-compose.yml b/codegen/templates/triggers/dev-environment/docker-compose.yml new file mode 100644 index
00000000..b0933d54 --- /dev/null +++ b/codegen/templates/triggers/dev-environment/docker-compose.yml @@ -0,0 +1,82 @@ +version: "3" +services: + graph-node: + container_name: graph-node + image: graphprotocol/graph-node:canary-substreams-index-module-e532c3135 + ports: + - "8000:8000" + - "8001:8001" + - "8020:8020" + - "8030:8030" + - "8040:8040" + depends_on: + - ipfs + - postgres + extra_hosts: + - host.docker.internal:host-gateway + environment: + GRAPH_NODE_CONFIG: /etc/graph-node/config.toml + SUBSTREAMS_API_TOKEN: $SUBSTREAMS_API_TOKEN + GRAPH_STORE_WRITE_BATCH_SIZE: 0 + postgres_host: postgres + postgres_user: graph-node + postgres_pass: let-me-in + postgres_db: graph-node + ipfs: "ipfs:5001" + ethereum: "mainnet:http://host.docker.internal:8545" + GRAPH_LOG: info + volumes: + - ./config.toml:/etc/graph-node/config.toml + healthcheck: + test: ["CMD", "nc", "-z", "localhost", "8000"] + interval: 30s + timeout: 10s + retries: 15 + + ipfs: + container_name: ipfs-gn + image: ipfs/kubo:v0.14.0 + ports: + - "5001:5001" + - "5002:8080" + volumes: + - ./data/ipfs:/data/ipfs + - ./data/ipfs-export:/export + healthcheck: + test: ["CMD", "nc", "-z", "localhost", "5001"] + interval: 30s + timeout: 10s + retries: 15 + + postgres: + container_name: postgres-gn + image: postgres:14 + ports: + - "5432:5432" + command: ["postgres", "-cshared_preload_libraries=pg_stat_statements"] + environment: + POSTGRES_USER: graph-node + POSTGRES_PASSWORD: let-me-in + POSTGRES_DB: graph-node + POSTGRES_INITDB_ARGS: "-E UTF8 --locale=C" + volumes: + - ./data/postgres:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready"] + interval: 30s + timeout: 10s + retries: 15 + + pgweb: + container_name: pgweb-gn + image: sosedoff/pgweb:0.11.12 + restart: on-failure + ports: + - "8081:8081" + command: ["pgweb", "--bind=0.0.0.0", "--listen=8081", "--binary-codec=hex"] + links: + - postgres:postgres + environment: + - 
DATABASE_URL=postgres://graph-node:let-me-in@postgres:5432/graph-node?sslmode=disable + depends_on: + - postgres diff --git a/codegen/templates/triggers/dev-environment/start.sh b/codegen/templates/triggers/dev-environment/start.sh new file mode 100755 index 00000000..5c693bac --- /dev/null +++ b/codegen/templates/triggers/dev-environment/start.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +export CONFIG=${ROOT}/generated-config.toml + +clean= + +graph="../node_modules/.bin/graph" + +main() { + pushd "$ROOT" &> /dev/null + + while getopts "hc" opt; do + case $opt in + h) usage && exit 0;; + c) clean=true;; + \?) usage_error "Invalid option: -$OPTARG";; + esac + done + + shift $((OPTIND-1)) + + if [[ -d "./data" && $clean == true ]]; then + echo "Cleaning data directory" + rm -rf ./data 1> /dev/null + fi + + prepare + checkToken + + exec docker-compose up +} +prepare() { + if [[ ! -d "./data/ipfs" ]]; then + mkdir -p ./data/ipfs 1> /dev/null + fi + + if [[ ! -d "./data/postgres" ]]; then + mkdir -p ./data/postgres 1> /dev/null + fi +} + +checkToken() { + if [[ -z "${SUBSTREAMS_API_TOKEN}" ]]; then + echo "Please set SUBSTREAMS_API_TOKEN in your environment" + exit 1 + fi +} + +usage_error() { + message="$1" + exit_code="$2" + + echo "ERROR: $message" + echo "" + usage + exit ${exit_code:-1} +} + +usage() { + echo "usage: up [-c]" + echo "" + echo "Setup required files layout and launch 'docker compose up'" + echo "spinning up all required development dependencies." 
+ echo "" + echo "Options" + echo " -c Clean 'data' directory before launching dependencies" + echo " -h Display help about this script" +} + +main "$@" \ No newline at end of file diff --git a/codegen/templates/triggers/package.json.gotmpl b/codegen/templates/triggers/package.json.gotmpl new file mode 100644 index 00000000..f8d3f574 --- /dev/null +++ b/codegen/templates/triggers/package.json.gotmpl @@ -0,0 +1,37 @@ +{ + "name": "{{ .Name }}", + "license": "UNLICENSED", + "scripts": { + "codegen": "graph codegen", + "build": "graph build", + "publish": "graph publish", + "generate": "buf generate", + "deploy": "graph deploy --node https://api.studio.thegraph.com/deploy/ {{ .Name }}", + "create-local": "graph create --node http://localhost:8020/ {{ .Name }}", + "remove-local": "graph remove --node http://localhost:8020/ {{ .Name }}", + "deploy-local": "graph deploy --node http://localhost:8020/ --ipfs http://localhost:5001 {{ .Name }} --version-label=v0.0.1", + "test": "graph test" + }, + "dependencies": { + "@graphprotocol/graph-cli": "^0.73.0", + "@graphprotocol/graph-ts": "^0.34.0", + "@types/node": "^16.11.9", + "@typescript-eslint/eslint-plugin": "^2.0.0", + "@typescript-eslint/parser": "^2.0.0", + "as-proto": "^1.3.0", + "assemblyscript-json": "^1.1.0", + "eslint": "^6.2.2", + "eslint-config-prettier": "^6.1.0", + "prettier": "^1.18.2", + "protoc-gen-js": "^3.21.2", + "protocol-buffers": "^5.0.0", + "ts-node": "^10.9.2", + "tsx": "^4.7.1", + "typescript": "^3.9.10" + }, + "devDependencies": { + "@types/google-protobuf": "^3.15.12", + "as-proto-gen": "^1.3.0", + "matchstick-as": "^0.6.0" + } +} diff --git a/codegen/templates/triggers/run-local.sh.gotmpl b/codegen/templates/triggers/run-local.sh.gotmpl new file mode 100644 index 00000000..e0577d80 --- /dev/null +++ b/codegen/templates/triggers/run-local.sh.gotmpl @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +example_query=" +{ + {{ 
$.GetEntityOutputName }} { + + } +} +" + +if [[ -z $SUBSTREAMS_API_TOKEN ]]; then + echo "Please set SUBSTREAMS_API_TOKEN in your environment" + exit 1 +fi + +echo "" +echo "----- Running docker environment -----" +echo "" +sleep 1 +docker compose -f $ROOT/dev-environment/docker-compose.yml up -d --wait + +echo "" +echo "----- Installing npm dependencies -----" +echo "" +sleep 1 +npm install + +echo "" +echo "----- Generating bindings -----" +echo "" +sleep 1 +npm run generate + +echo "" +echo "----- Generating codegen -----" +echo "" +sleep 1 +npm run codegen + +echo "" +echo "----- Creating local graph -----" +echo "" +npm run create-local + +echo "" +echo "----- Running local graph -----" +echo "" +sleep 1 +npm run deploy-local + +echo "Here is an example query you can run:" +echo "" +echo $example_query \ No newline at end of file diff --git a/codegen/templates/triggers/schema.graphql.gotmpl b/codegen/templates/triggers/schema.graphql.gotmpl new file mode 100644 index 00000000..a5b59e7a --- /dev/null +++ b/codegen/templates/triggers/schema.graphql.gotmpl @@ -0,0 +1,9 @@ +{{- range $entityName, $entityMapping := $.GetEntities }} +type {{ $entityName }} @entity { + id: ID! 
+ {{- range $fieldName, $graphqlType := $entityMapping }} + {{ $fieldName }}: {{ $graphqlType }} + {{- end }} +} + +{{ end -}} \ No newline at end of file diff --git a/codegen/templates/triggers/src/mappings.ts.gotmpl b/codegen/templates/triggers/src/mappings.ts.gotmpl new file mode 100644 index 00000000..d78bcc9f --- /dev/null +++ b/codegen/templates/triggers/src/mappings.ts.gotmpl @@ -0,0 +1,16 @@ +import { log } from "@graphprotocol/graph-ts"; +import { Protobuf } from "as-proto/assembly"; +import { JSON } from "assemblyscript-json"; +import { {{ $.GetEntityOutputName }} } from "../generated/schema"; +import { {{ $.GetEntityOutputName}} } from "./pb/contract/v1/Triggers"; + + +export function handleTriggers(bytes: Uint8Array): void { + const {{ $.GetEntityOutputName }} = Protobuf.decode<{{ $.GetEntityOutputName}}>( + bytes, + {{ $.GetEntityOutputName }}.decode + ); + + test +} + diff --git a/codegen/templates/triggers/subgraph.yaml.gotmpl b/codegen/templates/triggers/subgraph.yaml.gotmpl new file mode 100644 index 00000000..781f3d30 --- /dev/null +++ b/codegen/templates/triggers/subgraph.yaml.gotmpl @@ -0,0 +1,19 @@ +specVersion: 1.0.0 +description: {{ .Name }} Substreams-powered-Subgraph +indexerHints: + prune: auto +schema: + file: ./schema.graphql +dataSources: + - kind: substreams + name: test + network: {{ .Network }} + source: + package: + moduleName: {{ $.GetModuleName }} + file: {{ $.SubstreamsKebabName }}-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers \ No newline at end of file diff --git a/codegen/templates/triggers/tsconfig.json b/codegen/templates/triggers/tsconfig.json new file mode 100644 index 00000000..fd66650d --- /dev/null +++ b/codegen/templates/triggers/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "@graphprotocol/graph-ts/types/tsconfig.base.json", + "include": [ + "src" + ] +} diff --git a/go.mod b/go.mod index 7cc671bd..4f224643 100644 --- a/go.mod +++ 
b/go.mod @@ -32,6 +32,7 @@ require ( github.com/RoaringBitmap/roaring v1.9.1 github.com/alecthomas/chroma v0.10.0 github.com/alecthomas/participle v0.7.1 + github.com/bmatcuk/doublestar/v4 v4.6.1 github.com/bytecodealliance/wasmtime-go/v4 v4.0.0 github.com/charmbracelet/bubbles v0.18.0 github.com/charmbracelet/bubbletea v0.26.3 @@ -152,7 +153,7 @@ require ( github.com/ipfs/go-cid v0.4.0 // indirect github.com/itchyny/timefmt-go v0.1.5 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/klauspost/compress v1.16.6 // indirect + github.com/klauspost/compress v1.16.6 github.com/klauspost/cpuid/v2 v2.2.3 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect diff --git a/go.sum b/go.sum index 20f9c74d..c0b7e5de 100644 --- a/go.sum +++ b/go.sum @@ -146,6 +146,8 @@ github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6 github.com/blendle/zapdriver v1.3.1/go.mod h1:mdXfREi6u5MArG4j9fewC+FGnXaBR+T4Ox4J2u4eHCc= github.com/blendle/zapdriver v1.3.2-0.20200203083823-9200777f8a3d h1:fSlGu5ePbkjBidXuj2O5j9EcYrVB5Cr6/wdkYyDgxZk= github.com/blendle/zapdriver v1.3.2-0.20200203083823-9200777f8a3d/go.mod h1:yCBkgASmKHgUOFjK9h1sOytUVgA+JkQjqj3xYP4AdWY= +github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I= +github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bobg/go-generics/v2 v2.1.1 h1:4rN9upY6Xm4TASSMeH+NzUghgO4h/SbNrQphIjRd/R0= github.com/bobg/go-generics/v2 v2.1.1/go.mod h1:iPMSRVFlzkJSYOCXQ0n92RA3Vxw0RBv2E8j9ZODXgHk= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= From 12daddac3bec863ee32fe4046843d94847fab75e Mon Sep 17 00:00:00 2001 From: arnaudberger Date: Fri, 19 Jul 2024 19:13:37 -0400 Subject: [PATCH 2/6] save current state --- codegen/cmd.go | 9 +- codegen/subgraph.go | 307 ------------------ codegen/subgraph/chainconfig.go | 94 ++++++ 
codegen/subgraph/helpers.go | 153 +++++++++ codegen/subgraph/project.go | 143 ++++++++ codegen/subgraph/subgraph.go | 92 ++++++ .../templates/triggers/Makefile.gotmpl | 0 .../templates/triggers/README.md | 0 .../templates/triggers/buf.gen.yaml | 0 .../dev-environment/config.toml.gotmpl | 2 +- .../dev-environment/docker-compose.yml | 0 .../triggers/dev-environment/start.sh | 0 .../templates/triggers/package.json.gotmpl | 2 +- .../templates/triggers/run-local.sh.gotmpl | 3 - .../templates/triggers/schema.graphql.gotmpl | 11 + .../templates/triggers/src/mappings.ts.gotmpl | 48 +++ .../templates/triggers/subgraph.yaml.gotmpl | 0 .../templates/triggers/tsconfig.json | 0 .../templates/triggers/schema.graphql.gotmpl | 9 - .../templates/triggers/src/mappings.ts.gotmpl | 16 - 20 files changed, 551 insertions(+), 338 deletions(-) delete mode 100644 codegen/subgraph.go create mode 100644 codegen/subgraph/chainconfig.go create mode 100644 codegen/subgraph/helpers.go create mode 100644 codegen/subgraph/project.go create mode 100644 codegen/subgraph/subgraph.go rename codegen/{ => subgraph}/templates/triggers/Makefile.gotmpl (100%) rename codegen/{ => subgraph}/templates/triggers/README.md (100%) rename codegen/{ => subgraph}/templates/triggers/buf.gen.yaml (100%) rename codegen/{ => subgraph}/templates/triggers/dev-environment/config.toml.gotmpl (94%) rename codegen/{ => subgraph}/templates/triggers/dev-environment/docker-compose.yml (100%) rename codegen/{ => subgraph}/templates/triggers/dev-environment/start.sh (100%) rename codegen/{ => subgraph}/templates/triggers/package.json.gotmpl (90%) rename codegen/{ => subgraph}/templates/triggers/run-local.sh.gotmpl (95%) create mode 100644 codegen/subgraph/templates/triggers/schema.graphql.gotmpl create mode 100644 codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl rename codegen/{ => subgraph}/templates/triggers/subgraph.yaml.gotmpl (100%) rename codegen/{ => subgraph}/templates/triggers/tsconfig.json (100%) delete mode 
100644 codegen/templates/triggers/schema.graphql.gotmpl delete mode 100644 codegen/templates/triggers/src/mappings.ts.gotmpl diff --git a/codegen/cmd.go b/codegen/cmd.go index 72e62c0d..3082bfa7 100644 --- a/codegen/cmd.go +++ b/codegen/cmd.go @@ -1,5 +1,12 @@ package codegen -import "github.com/spf13/cobra" +import ( + "github.com/spf13/cobra" + "github.com/streamingfast/substreams/codegen/subgraph" +) var Cmd = &cobra.Command{Use: "codegen", Short: "Code generator for substreams"} + +func init() { + Cmd.AddCommand(subgraph.SubgraphCmd) +} diff --git a/codegen/subgraph.go b/codegen/subgraph.go deleted file mode 100644 index 369c9717..00000000 --- a/codegen/subgraph.go +++ /dev/null @@ -1,307 +0,0 @@ -package codegen - -import ( - "bytes" - "embed" - "fmt" - "io/fs" - "os" - "path/filepath" - "strings" - "text/template" - - "github.com/bmatcuk/doublestar/v4" - "github.com/spf13/cobra" - "github.com/streamingfast/substreams/manifest" - pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" - "google.golang.org/protobuf/types/descriptorpb" -) - -var subgraphCmd = &cobra.Command{ - Use: "subgraph ", - Short: "Generate subgraph dev environment from substreams manifest", - Args: cobra.ExactArgs(2), - RunE: generateSubgraphEnv, -} - -func init() { - subgraphCmd.Flags().String("substreams-api-token-envvar", "SUBSTREAMS_API_TOKEN", "name of variable containing Substreams Authentication token") - Cmd.AddCommand(subgraphCmd) -} - -type Project struct { - Name string - Network string - Module *pbsubstreams.Module - OutputDescriptor *descriptorpb.DescriptorProto - ProtoTypeMapping map[string]*descriptorpb.DescriptorProto -} - -func NewProject(name, network string, module *pbsubstreams.Module, outputDescriptor *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) *Project { - return &Project{ - Network: network, - Name: name, - Module: module, - OutputDescriptor: outputDescriptor, - ProtoTypeMapping: protoTypeMapping, - } -} - 
-func (p *Project) SubstreamsKebabName() string { - return strings.ReplaceAll(p.Name, "_", "-") -} - -func (p *Project) GetModuleName() string { - return p.Module.Name -} - -func (p *Project) GetEntityOutputName() string { - return p.OutputDescriptor.GetName() -} - -func GetExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[string]*descriptorpb.DescriptorProto { - var protoTypeMapping = map[string]*descriptorpb.DescriptorProto{} - for _, protoFile := range protoFiles { - packageName := protoFile.GetPackage() - for _, message := range protoFile.MessageType { - currentName := "." + packageName + "." + message.GetName() - protoTypeMapping[currentName] = message - processMessage(message, currentName, protoTypeMapping) - } - } - - return protoTypeMapping -} - -func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { - for _, nestedMessage := range message.NestedType { - currentName := "." + parentName + "." + nestedMessage.GetName() - protoTypeMapping[currentName] = nestedMessage - processMessage(nestedMessage, currentName, protoTypeMapping) - } -} - -func (p *Project) GetEntities() (map[string]map[string]string, error) { - var outputMap = map[string]map[string]string{} - err := p.GetEntityFromMessage(p.OutputDescriptor, outputMap) - if err != nil { - return nil, fmt.Errorf("getting entities: %w", err) - } - - return outputMap, nil -} - -func (p *Project) GetEntityFromMessage(message *descriptorpb.DescriptorProto, inputMap map[string]map[string]string) error { - var fieldMapping = map[string]string{} - for _, field := range message.GetField() { - switch *field.Type { - case descriptorpb.FieldDescriptorProto_TYPE_BYTES: - fieldMapping[field.GetName()] = "Bytes!" - case descriptorpb.FieldDescriptorProto_TYPE_UINT64: - fieldMapping[field.GetName()] = "Int!" - case descriptorpb.FieldDescriptorProto_TYPE_INT64: - fieldMapping[field.GetName()] = "Int!" 
- case descriptorpb.FieldDescriptorProto_TYPE_INT32: - fieldMapping[field.GetName()] = "Int!" - case descriptorpb.FieldDescriptorProto_TYPE_UINT32: - fieldMapping[field.GetName()] = "Int!" - case descriptorpb.FieldDescriptorProto_TYPE_STRING: - fieldMapping[field.GetName()] = "String!" - case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: - sanitizeFieldName := (*field.TypeName)[strings.LastIndex(*field.TypeName, ".")+1:] - switch *field.Label { - case descriptorpb.FieldDescriptorProto_LABEL_REPEATED: - fieldMapping[field.GetName()] = "[" + sanitizeFieldName + "]!" - case descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL: - fieldMapping[field.GetName()] = sanitizeFieldName + "!" - case descriptorpb.FieldDescriptorProto_LABEL_REQUIRED: - fieldMapping[field.GetName()] = sanitizeFieldName + "!" - default: - return fmt.Errorf("field label %q not supported", *field.Label) - } - nestedMessage := p.ProtoTypeMapping[*field.TypeName] - err := p.GetEntityFromMessage(nestedMessage, inputMap) - if err != nil { - return fmt.Errorf("getting entity from message: %w", err) - } - default: - return fmt.Errorf("field type %q not supported", *field.Type) - } - } - - inputMap[message.GetName()] = fieldMapping - return nil -} - -//go:embed templates/* -var templatesFS embed.FS - -func ParseFS(myFuncs template.FuncMap, fsys fs.FS, pattern string) (*template.Template, error) { - t := template.New("").Funcs(myFuncs) - filenames, err := doublestar.Glob(fsys, pattern) - if err != nil { - return nil, err - } - if len(filenames) == 0 { - return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) - } - - for _, filename := range filenames { - b, err := fs.ReadFile(fsys, filename) - if err != nil { - return nil, err - } - - name, _ := strings.CutPrefix(filename, "templates/") - - _, err = t.New(name).Parse(string(b)) - if err != nil { - return nil, err - } - } - return t, nil -} - -func (p *Project) Render() (projectFiles map[string][]byte, err error) { - projectFiles = 
map[string][]byte{} - - tpls, err := ParseFS(nil, templatesFS, "**/*.gotmpl") - if err != nil { - return nil, fmt.Errorf("parse templates: %w", err) - } - - templateFiles := map[string]string{ - "triggers/Makefile.gotmpl": "Makefile", - "triggers/buf.gen.yaml": "buf.gen.yaml", - "triggers/package.json.gotmpl": "package.json", - "triggers/tsconfig.json": "tsconfig.json", - "triggers/subgraph.yaml.gotmpl": "subgraph.yaml", - "triggers/schema.graphql.gotmpl": "schema.graphql", - "triggers/src/mappings.ts.gotmpl": "src/mappings.ts", - "triggers/run-local.sh.gotmpl": "run-local.sh", - } - - for templateFile, finalFileName := range templateFiles { - var content []byte - if strings.HasSuffix(templateFile, ".gotmpl") { - buffer := &bytes.Buffer{} - if err := tpls.ExecuteTemplate(buffer, templateFile, p); err != nil { - return nil, fmt.Errorf("embed render entry template %q: %w", templateFile, err) - } - content = buffer.Bytes() - } else { - content, err = templatesFS.ReadFile("templates/" + templateFile) - if err != nil { - return nil, fmt.Errorf("reading %q: %w", templateFile, err) - } - } - - projectFiles[finalFileName] = content - } - - return -} - -func getModule(pkg *pbsubstreams.Package, moduleName string) (*pbsubstreams.Module, error) { - existingModules := pkg.GetModules().GetModules() - for _, module := range existingModules { - if (module.Name) == moduleName { - return module, nil - } - } - - return nil, fmt.Errorf("module %q does not exists", moduleName) -} - -// delete all partial files which are already merged into the kv store -func generateSubgraphEnv(cmd *cobra.Command, args []string) error { - //ctx := cmd.Context() - manifestPath := args[0] - moduleName := args[1] - reader, err := manifest.NewReader(manifestPath) - if err != nil { - return fmt.Errorf("manifest reader: %w", err) - } - - pkg, _, err := reader.Read() - if err != nil { - return fmt.Errorf("read manifest %q: %w", manifestPath, err) - } - - requestedModule, err := getModule(pkg, moduleName) - 
if err != nil { - return fmt.Errorf("getting module: %w", err) - } - - if pkg.GetPackageMeta()[0] == nil { - return fmt.Errorf("package meta not found") - } - - messageDescriptor, err := searchForMessageTypeIntoPackage(pkg, requestedModule.Output.Type) - if err != nil { - return fmt.Errorf("searching for message type: %w", err) - } - - protoTypeMapping := GetExistingProtoTypes(pkg.ProtoFiles) - - project := NewProject(pkg.GetPackageMeta()[0].Name, pkg.Network, requestedModule, messageDescriptor, protoTypeMapping) - - projectFiles, err := project.Render() - if err != nil { - return fmt.Errorf("rendering project files: %w", err) - } - - saveDir := "/tmp/testSubCmd/" - - err = os.MkdirAll(saveDir, 0755) - if err != nil { - return fmt.Errorf("creating directory %s: %w", saveDir, err) - } - - for fileName, fileContent := range projectFiles { - filePath := filepath.Join(saveDir, fileName) - - err := os.MkdirAll(filepath.Dir(filePath), 0755) - if err != nil { - return fmt.Errorf("creating directory %s: %w", filepath.Dir(filePath), err) - } - - err = os.WriteFile(filePath, fileContent, 0644) - if err != nil { - return fmt.Errorf("saving file %s: %w", filePath, err) - } - } - - return nil -} - -func searchForMessageTypeIntoPackage(pkg *pbsubstreams.Package, outputType string) (*descriptorpb.DescriptorProto, error) { - sanitizeMessageType := outputType[strings.Index(outputType, ":")+1:] - for _, protoFile := range pkg.ProtoFiles { - packageName := protoFile.GetPackage() - for _, message := range protoFile.MessageType { - if packageName+"."+message.GetName() == sanitizeMessageType { - return message, nil - } - - nestedMessage := checkNestedMessages(message, packageName, sanitizeMessageType) - if nestedMessage != nil { - return nestedMessage, nil - } - } - } - - return nil, fmt.Errorf("message type %q not found in package", sanitizeMessageType) -} - -func checkNestedMessages(message *descriptorpb.DescriptorProto, packageName, messageType string) *descriptorpb.DescriptorProto { 
- for _, nestedMessage := range message.NestedType { - if packageName+"."+message.GetName()+"."+nestedMessage.GetName() == messageType { - return nestedMessage - } - - checkNestedMessages(nestedMessage, packageName, messageType) - } - - return nil -} diff --git a/codegen/subgraph/chainconfig.go b/codegen/subgraph/chainconfig.go new file mode 100644 index 00000000..fa571be3 --- /dev/null +++ b/codegen/subgraph/chainconfig.go @@ -0,0 +1,94 @@ +package subgraph + +type ChainConfig struct { + ID string // Public + DisplayName string // Public + ExplorerLink string + ApiEndpoint string + FirehoseEndpoint string + Network string + SupportsCalls bool +} + +var ChainConfigByID = map[string]*ChainConfig{ + "mainnet": { + DisplayName: "Ethereum Mainnet", + ExplorerLink: "https://etherscan.io", + ApiEndpoint: "https://api.etherscan.io", + FirehoseEndpoint: "mainnet.eth.streamingfast.io:443", + Network: "mainnet", + SupportsCalls: true, + }, + "bnb": { + DisplayName: "BNB", + ExplorerLink: "https://bscscan.com", + ApiEndpoint: "https://api.bscscan.com", + FirehoseEndpoint: "bnb.streamingfast.io:443", + Network: "bsc", + SupportsCalls: true, + }, + "polygon": { + DisplayName: "Polygon", + ExplorerLink: "https://polygonscan.com", + ApiEndpoint: "https://api.polygonscan.com", + FirehoseEndpoint: "polygon.streamingfast.io:443", + Network: "polygon", + SupportsCalls: true, + }, + "amoy": { + DisplayName: "Polygon Amoy Testnet", + ExplorerLink: "https://www.okx.com/web3/explorer/amoy", + ApiEndpoint: "", + FirehoseEndpoint: "amoy.substreams.pinax.network:443", + Network: "amoy", + SupportsCalls: true, + }, + "arbitrum": { + DisplayName: "Arbitrum", + ExplorerLink: "https://arbiscan.io", + ApiEndpoint: "https://api.arbiscan.io", + FirehoseEndpoint: "arb-one.streamingfast.io:443", + Network: "arbitrum", + SupportsCalls: true, + }, + "holesky": { + DisplayName: "Holesky", + ExplorerLink: "https://holesky.etherscan.io/", + ApiEndpoint: "https://api-holesky.etherscan.io", + 
FirehoseEndpoint: "holesky.eth.streamingfast.io:443", + Network: "holesky", + SupportsCalls: true, + }, + "sepolia": { + DisplayName: "Sepolia Testnet", + ExplorerLink: "https://sepolia.etherscan.io", + ApiEndpoint: "https://api-sepolia.etherscan.io", + FirehoseEndpoint: "sepolia.streamingfast.io:443", + Network: "sepolia", + SupportsCalls: true, + }, + "optimism": { + DisplayName: "Optimism Mainnet", + ExplorerLink: "https://optimistic.etherscan.io", + ApiEndpoint: "https://api-optimistic.etherscan.io", + FirehoseEndpoint: "opt-mainnet.streamingfast.io:443", + Network: "optimism", + SupportsCalls: false, + }, + "avalanche": { + DisplayName: "Avalanche C-chain", + ExplorerLink: "https://subnets.avax.network/c-chain", + ApiEndpoint: "", + FirehoseEndpoint: "avalanche-mainnet.streamingfast.io:443", + Network: "avalanche", + SupportsCalls: false, + }, + "chapel": { + DisplayName: "BNB Chapel Testnet", + ExplorerLink: "https://testnet.bscscan.com/", + ApiEndpoint: "", + FirehoseEndpoint: "chapel.substreams.pinax.network:443", + Network: "chapel", + SupportsCalls: true, + }, +} diff --git a/codegen/subgraph/helpers.go b/codegen/subgraph/helpers.go new file mode 100644 index 00000000..f4a0fc12 --- /dev/null +++ b/codegen/subgraph/helpers.go @@ -0,0 +1,153 @@ +package subgraph + +import ( + "fmt" + "strings" + + pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" + "google.golang.org/protobuf/types/descriptorpb" +) + +func GetProjectEntities(outputDescriptor *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) (map[string]Entity, error) { + var outputMap = map[string]Entity{} + err := buildEntitiesMapping(outputDescriptor, outputMap, protoTypeMapping) + if err != nil { + return nil, fmt.Errorf("getting entities: %w", err) + } + + return outputMap, nil +} + +type Entity struct { + NestedEntitiesMapping map[string]string + protoMessage *descriptorpb.DescriptorProto + protoPath string + ProtobufPath string + 
HasClassicTypes bool + NameAsProto string + NameAsEntity string +} + +func getMessageProtoPath(message *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) (string, error) { + for protoPath, currentMessage := range protoTypeMapping { + if currentMessage == message { + return protoPath, nil + } + } + + return "", fmt.Errorf("proto path not found for message %q", message.Name) +} + +func buildEntitiesMapping(message *descriptorpb.DescriptorProto, inputMap map[string]Entity, protoTypeMapping map[string]*descriptorpb.DescriptorProto) error { + protoPath, err := getMessageProtoPath(message, protoTypeMapping) + if err != nil { + return fmt.Errorf("getting proto path: %w", err) + } + + protobufPath, _ := strings.CutPrefix(protoPath, ".") + protobufPath = strings.ReplaceAll(protobufPath, ".", "/") + + var entity = Entity{ + protoMessage: message, + protoPath: protoPath, + ProtobufPath: protobufPath, + NestedEntitiesMapping: map[string]string{}, + HasClassicTypes: false, + NameAsProto: "proto" + message.GetName(), + NameAsEntity: "entity" + message.GetName(), + } + + for _, field := range message.GetField() { + switch *field.Type { + case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: + sanitizeFieldName := (*field.TypeName)[strings.LastIndex(*field.TypeName, ".")+1:] + switch *field.Label { + case descriptorpb.FieldDescriptorProto_LABEL_REPEATED: + entity.NestedEntitiesMapping[field.GetName()] = "[" + sanitizeFieldName + "!]!" + case descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL: + entity.NestedEntitiesMapping[field.GetName()] = sanitizeFieldName + "!" + case descriptorpb.FieldDescriptorProto_LABEL_REQUIRED: + entity.NestedEntitiesMapping[field.GetName()] = sanitizeFieldName + "!" 
+ default: + return fmt.Errorf("field label %q not supported", *field.Label) + } + nestedMessage := protoTypeMapping[*field.TypeName] + err := buildEntitiesMapping(nestedMessage, inputMap, protoTypeMapping) + if err != nil { + return fmt.Errorf("getting entity from message: %w", err) + } + case descriptorpb.FieldDescriptorProto_TYPE_ENUM: + return fmt.Errorf("enum type not supported") + default: + entity.HasClassicTypes = true + } + } + + entityName := message.GetName() + inputMap[entityName] = entity + return nil +} + +func GetExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[string]*descriptorpb.DescriptorProto { + var protoTypeMapping = map[string]*descriptorpb.DescriptorProto{} + for _, protoFile := range protoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + currentName := "." + packageName + "." + message.GetName() + protoTypeMapping[currentName] = message + processMessage(message, currentName, protoTypeMapping) + } + } + + return protoTypeMapping +} + +func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { + for _, nestedMessage := range message.NestedType { + currentName := "." + parentName + "." 
+ nestedMessage.GetName() + protoTypeMapping[currentName] = nestedMessage + processMessage(nestedMessage, currentName, protoTypeMapping) + } +} + +func GetModule(pkg *pbsubstreams.Package, moduleName string) (*pbsubstreams.Module, error) { + existingModules := pkg.GetModules().GetModules() + for _, module := range existingModules { + if (module.Name) == moduleName { + return module, nil + } + } + + return nil, fmt.Errorf("module %q does not exists", moduleName) +} + +func SearchForMessageTypeIntoPackage(pkg *pbsubstreams.Package, outputType string) (*descriptorpb.DescriptorProto, error) { + sanitizeMessageType := outputType[strings.Index(outputType, ":")+1:] + for _, protoFile := range pkg.ProtoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + if packageName+"."+message.GetName() == sanitizeMessageType { + return message, nil + } + + nestedMessage := checkNestedMessages(message, packageName, sanitizeMessageType) + if nestedMessage != nil { + return nestedMessage, nil + } + } + } + + return nil, fmt.Errorf("message type %q not found in package", sanitizeMessageType) +} + +func checkNestedMessages(message *descriptorpb.DescriptorProto, packageName, messageType string) *descriptorpb.DescriptorProto { + for _, nestedMessage := range message.NestedType { + if packageName+"."+message.GetName()+"."+nestedMessage.GetName() == messageType { + return nestedMessage + } + + checkNestedMessages(nestedMessage, packageName, messageType) + } + + return nil +} diff --git a/codegen/subgraph/project.go b/codegen/subgraph/project.go new file mode 100644 index 00000000..34bbde45 --- /dev/null +++ b/codegen/subgraph/project.go @@ -0,0 +1,143 @@ +package subgraph + +import ( + "bytes" + "embed" + "fmt" + "html/template" + "io/fs" + "strings" + + "github.com/bmatcuk/doublestar/v4" + pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" + "google.golang.org/protobuf/types/descriptorpb" +) + +type Project struct { + Name 
string + Network string + Module *pbsubstreams.Module + OutputDescriptor *descriptorpb.DescriptorProto + EntitiesMapping map[string]Entity +} + +func NewProject(name, network string, module *pbsubstreams.Module, outputDescriptor *descriptorpb.DescriptorProto, entitiesMapping map[string]Entity) *Project { + return &Project{ + Network: network, + Name: name, + Module: module, + OutputDescriptor: outputDescriptor, + EntitiesMapping: entitiesMapping, + } +} + +func (p *Project) GetMainEntity() Entity { + return p.EntitiesMapping[p.OutputDescriptor.GetName()] +} + +func (p *Project) GetMainEntityName() string { + return p.OutputDescriptor.GetName() +} + +func (p *Project) SubstreamsKebabName() string { + return strings.ReplaceAll(p.Name, "_", "-") +} + +func (p *Project) GetModuleName() string { + return p.Module.Name +} + +func (p *Project) GetModuleOutputProtoPath() string { + return p.Module.Output.Type[strings.LastIndex(p.Module.Output.Type, ":")+1:] +} + +func (p *Project) GetModuleOutputProtobufPath() string { + return strings.ReplaceAll(p.GetModuleOutputProtoPath(), ".", "/") +} + +func (p *Project) GetEntities() map[string]Entity { + return p.EntitiesMapping +} + +func (p *Project) ChainEndpoint() string { return ChainConfigByID[p.Network].FirehoseEndpoint } + +func (p *Project) Render(withDevEnv bool) (projectFiles map[string][]byte, err error) { + projectFiles = map[string][]byte{} + + funcMap := template.FuncMap{ + "arr": func(els ...any) []any { + return els + }, + } + + tpls, err := ParseFS(funcMap, templatesFS, "**/*.gotmpl") + if err != nil { + return nil, fmt.Errorf("parse templates: %w", err) + } + + templateFiles := map[string]string{ + "triggers/Makefile.gotmpl": "Makefile", + "triggers/buf.gen.yaml": "buf.gen.yaml", + "triggers/package.json.gotmpl": "package.json", + "triggers/tsconfig.json": "tsconfig.json", + "triggers/subgraph.yaml.gotmpl": "subgraph.yaml", + "triggers/schema.graphql.gotmpl": "schema.graphql", + "triggers/src/mappings.ts.gotmpl": 
"src/mappings.ts", + "triggers/run-local.sh.gotmpl": "run-local.sh", + } + + if withDevEnv { + templateFiles["triggers/dev-environment/docker-compose.yml"] = "dev-environment/docker-compose.yml" + templateFiles["triggers/dev-environment/start.sh"] = "dev-environment/start.sh" + templateFiles["triggers/dev-environment/config.toml.gotmpl"] = "dev-environment/config.toml" + } + + for templateFile, finalFileName := range templateFiles { + var content []byte + if strings.HasSuffix(templateFile, ".gotmpl") { + buffer := &bytes.Buffer{} + if err := tpls.ExecuteTemplate(buffer, templateFile, p); err != nil { + return nil, fmt.Errorf("embed render entry template %q: %w", templateFile, err) + } + content = buffer.Bytes() + } else { + content, err = templatesFS.ReadFile("templates/" + templateFile) + if err != nil { + return nil, fmt.Errorf("reading %q: %w", templateFile, err) + } + } + + projectFiles[finalFileName] = content + } + + return +} + +//go:embed templates/* +var templatesFS embed.FS + +func ParseFS(myFuncs template.FuncMap, fsys fs.FS, pattern string) (*template.Template, error) { + t := template.New("").Funcs(myFuncs) + filenames, err := doublestar.Glob(fsys, pattern) + if err != nil { + return nil, err + } + if len(filenames) == 0 { + return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) + } + + for _, filename := range filenames { + b, err := fs.ReadFile(fsys, filename) + if err != nil { + return nil, err + } + + name, _ := strings.CutPrefix(filename, "templates/") + + _, err = t.New(name).Parse(string(b)) + if err != nil { + return nil, err + } + } + return t, nil +} diff --git a/codegen/subgraph/subgraph.go b/codegen/subgraph/subgraph.go new file mode 100644 index 00000000..09c3a3f2 --- /dev/null +++ b/codegen/subgraph/subgraph.go @@ -0,0 +1,92 @@ +package subgraph + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/spf13/cobra" + "github.com/streamingfast/cli/sflags" + "github.com/streamingfast/substreams/manifest" +) + +var 
SubgraphCmd = &cobra.Command{ + Use: "subgraph ", + Short: "Generate subgraph dev environment from substreams manifest", + Args: cobra.ExactArgs(3), + RunE: generateSubgraphEnv, +} + +func init() { + SubgraphCmd.Flags().Bool("with-dev-env", false, "generate graph node dev environment") +} + +func generateSubgraphEnv(cmd *cobra.Command, args []string) error { + manifestPath := args[0] + moduleName := args[1] + networkName := args[2] + reader, err := manifest.NewReader(manifestPath) + if err != nil { + return fmt.Errorf("manifest reader: %w", err) + } + + withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") + + pkg, _, err := reader.Read() + if err != nil { + return fmt.Errorf("read manifest %q: %w", manifestPath, err) + } + + requestedModule, err := GetModule(pkg, moduleName) + if err != nil { + return fmt.Errorf("getting module: %w", err) + } + + if pkg.GetPackageMeta()[0] == nil { + return fmt.Errorf("package meta not found") + } + + projectName := pkg.GetPackageMeta()[0].Name + + messageDescriptor, err := SearchForMessageTypeIntoPackage(pkg, requestedModule.Output.Type) + if err != nil { + return fmt.Errorf("searching for message type: %w", err) + } + + protoTypeMapping := GetExistingProtoTypes(pkg.ProtoFiles) + + entitiesMapping, err := GetProjectEntities(messageDescriptor, protoTypeMapping) + if err != nil { + panic(fmt.Errorf("getting entities: %w", err)) + } + + project := NewProject(projectName, networkName, requestedModule, messageDescriptor, entitiesMapping) + + projectFiles, err := project.Render(withDevEnv) + if err != nil { + return fmt.Errorf("rendering project files: %w", err) + } + + saveDir := "/tmp/testSubCmd2/" + + err = os.MkdirAll(saveDir, 0755) + if err != nil { + return fmt.Errorf("creating directory %s: %w", saveDir, err) + } + + for fileName, fileContent := range projectFiles { + filePath := filepath.Join(saveDir, fileName) + + err := os.MkdirAll(filepath.Dir(filePath), 0755) + if err != nil { + return fmt.Errorf("creating directory %s: 
%w", filepath.Dir(filePath), err) + } + + err = os.WriteFile(filePath, fileContent, 0644) + if err != nil { + return fmt.Errorf("saving file %s: %w", filePath, err) + } + } + + return nil +} diff --git a/codegen/templates/triggers/Makefile.gotmpl b/codegen/subgraph/templates/triggers/Makefile.gotmpl similarity index 100% rename from codegen/templates/triggers/Makefile.gotmpl rename to codegen/subgraph/templates/triggers/Makefile.gotmpl diff --git a/codegen/templates/triggers/README.md b/codegen/subgraph/templates/triggers/README.md similarity index 100% rename from codegen/templates/triggers/README.md rename to codegen/subgraph/templates/triggers/README.md diff --git a/codegen/templates/triggers/buf.gen.yaml b/codegen/subgraph/templates/triggers/buf.gen.yaml similarity index 100% rename from codegen/templates/triggers/buf.gen.yaml rename to codegen/subgraph/templates/triggers/buf.gen.yaml diff --git a/codegen/templates/triggers/dev-environment/config.toml.gotmpl b/codegen/subgraph/templates/triggers/dev-environment/config.toml.gotmpl similarity index 94% rename from codegen/templates/triggers/dev-environment/config.toml.gotmpl rename to codegen/subgraph/templates/triggers/dev-environment/config.toml.gotmpl index f23f1f7d..4b5a4d8f 100644 --- a/codegen/templates/triggers/dev-environment/config.toml.gotmpl +++ b/codegen/subgraph/templates/triggers/dev-environment/config.toml.gotmpl @@ -9,7 +9,7 @@ pool_size = 10 [chains] ingestor = "block_ingestor_node" -[chains.{{ .ChainName }}] +[chains.{{ .Network }}] protocol = "substreams" shard = "primary" provider = [ diff --git a/codegen/templates/triggers/dev-environment/docker-compose.yml b/codegen/subgraph/templates/triggers/dev-environment/docker-compose.yml similarity index 100% rename from codegen/templates/triggers/dev-environment/docker-compose.yml rename to codegen/subgraph/templates/triggers/dev-environment/docker-compose.yml diff --git a/codegen/templates/triggers/dev-environment/start.sh 
b/codegen/subgraph/templates/triggers/dev-environment/start.sh similarity index 100% rename from codegen/templates/triggers/dev-environment/start.sh rename to codegen/subgraph/templates/triggers/dev-environment/start.sh diff --git a/codegen/templates/triggers/package.json.gotmpl b/codegen/subgraph/templates/triggers/package.json.gotmpl similarity index 90% rename from codegen/templates/triggers/package.json.gotmpl rename to codegen/subgraph/templates/triggers/package.json.gotmpl index f8d3f574..48fc93cf 100644 --- a/codegen/templates/triggers/package.json.gotmpl +++ b/codegen/subgraph/templates/triggers/package.json.gotmpl @@ -5,7 +5,7 @@ "codegen": "graph codegen", "build": "graph build", "publish": "graph publish", - "generate": "buf generate", + "generate": "buf generate --type=\"{{ $.GetModuleOutputProtoPath}}\" {{ $.SubstreamsKebabName }}-v0.1.0.spkg#format=bin", "deploy": "graph deploy --node https://api.studio.thegraph.com/deploy/ {{ .Name }}", "create-local": "graph create --node http://localhost:8020/ {{ .Name }}", "remove-local": "graph remove --node http://localhost:8020/ {{ .Name }}", diff --git a/codegen/templates/triggers/run-local.sh.gotmpl b/codegen/subgraph/templates/triggers/run-local.sh.gotmpl similarity index 95% rename from codegen/templates/triggers/run-local.sh.gotmpl rename to codegen/subgraph/templates/triggers/run-local.sh.gotmpl index e0577d80..d293626c 100644 --- a/codegen/templates/triggers/run-local.sh.gotmpl +++ b/codegen/subgraph/templates/triggers/run-local.sh.gotmpl @@ -6,9 +6,6 @@ ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" example_query=" { - {{ $.GetEntityOutputName }} { - - } } " diff --git a/codegen/subgraph/templates/triggers/schema.graphql.gotmpl b/codegen/subgraph/templates/triggers/schema.graphql.gotmpl new file mode 100644 index 00000000..63ed1a1c --- /dev/null +++ b/codegen/subgraph/templates/triggers/schema.graphql.gotmpl @@ -0,0 +1,11 @@ +{{- range $entityName, $entity := $.GetEntities }} +type {{ 
$entity.NameAsEntity }} @entity { + id: ID! + {{- range $fieldName, $graphqlType := $entity.NestedEntitiesMapping }} + {{ $fieldName }}: {{ $graphqlType }} + {{- end }} + {{if $entity.HasClassicTypes }} + jsonValue: String! + {{- end -}} +} +{{- end }} \ No newline at end of file diff --git a/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl b/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl new file mode 100644 index 00000000..db39d5c0 --- /dev/null +++ b/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl @@ -0,0 +1,48 @@ +import { log } from "@graphprotocol/graph-ts"; +import { Protobuf } from "as-proto/assembly"; +import { JSON } from "assemblyscript-json"; +{{- range $entityName, $entity := $.GetEntities }} +import { {{ $entityName}} as {{$entity.NameAsProto}}} from "./pb/{{ $entity.ProtobufPath }}"; +import { {{ $entity.NameAsEntity }} } from "../generated/schema"; +{{- end -}} + +export function handleTriggers(bytes: Uint8Array): void { + {{ $mainEntity := $.GetMainEntity }} + const $mainEntity.NameAsProto = Protobuf.decode<{{ $mainEntity.NameAsProto}}>( + bytes, + {{ $mainEntity.NameAsProto}}.decode + ); + + let mainId = crypto.keccak256(Bytes.fromUint8Array(bytes)).toHexString(); + + {{ template "entityFromProto" (arr $mainEntity "mainId") }} +} + +function ID(parentID: string, messageName: string): string { + return parentID + "-" + messageName; +} + +function IDWithIndex(parentID: string, messageName: string, index: number): string { + return parentID + "-" + messageName + "-" + index.toString(); +} + + +{{define "entityFromProto"}} + {{ $parentEntity := index . 0 }} + {{ $parentId := index . 
1 }} + let $parentEntity.NameAsEntity = new {{ $parentEntity.NameAsEntity }}(parentId); + + - for nested entities + let {{ $parentId }} = ID(parentId + "nestedEntityName") + let nestedEntity = new NestedEntity(nestedEntityID) + {{ template "entityFromProto" (arr .nestedEntity nestedEntityID) }} + + //Handle classic fields + -Create entityJsonValue JSON object + -set all classic fields + -$parentEntity.NameAsEntity.jsonValue = entityJsonValue.ToString() + -save + + $parentEntity.NameAsEntity.save() +{{end}} + diff --git a/codegen/templates/triggers/subgraph.yaml.gotmpl b/codegen/subgraph/templates/triggers/subgraph.yaml.gotmpl similarity index 100% rename from codegen/templates/triggers/subgraph.yaml.gotmpl rename to codegen/subgraph/templates/triggers/subgraph.yaml.gotmpl diff --git a/codegen/templates/triggers/tsconfig.json b/codegen/subgraph/templates/triggers/tsconfig.json similarity index 100% rename from codegen/templates/triggers/tsconfig.json rename to codegen/subgraph/templates/triggers/tsconfig.json diff --git a/codegen/templates/triggers/schema.graphql.gotmpl b/codegen/templates/triggers/schema.graphql.gotmpl deleted file mode 100644 index a5b59e7a..00000000 --- a/codegen/templates/triggers/schema.graphql.gotmpl +++ /dev/null @@ -1,9 +0,0 @@ -{{- range $entityName, $entityMapping := $.GetEntities }} -type {{ $entityName }} @entity { - id: ID! 
- {{- range $fieldName, $graphqlType := $entityMapping }} - {{ $fieldName }}: {{ $graphqlType }} - {{- end }} -} - -{{ end -}} \ No newline at end of file diff --git a/codegen/templates/triggers/src/mappings.ts.gotmpl b/codegen/templates/triggers/src/mappings.ts.gotmpl deleted file mode 100644 index d78bcc9f..00000000 --- a/codegen/templates/triggers/src/mappings.ts.gotmpl +++ /dev/null @@ -1,16 +0,0 @@ -import { log } from "@graphprotocol/graph-ts"; -import { Protobuf } from "as-proto/assembly"; -import { JSON } from "assemblyscript-json"; -import { {{ $.GetEntityOutputName }} } from "../generated/schema"; -import { {{ $.GetEntityOutputName}} } from "./pb/contract/v1/Triggers"; - - -export function handleTriggers(bytes: Uint8Array): void { - const {{ $.GetEntityOutputName }} = Protobuf.decode<{{ $.GetEntityOutputName}}>( - bytes, - {{ $.GetEntityOutputName }}.decode - ); - - test -} - From f140b1d6b76576919ea6e34e8ac183e323a671f9 Mon Sep 17 00:00:00 2001 From: arnaudberger Date: Tue, 23 Jul 2024 11:46:16 -0400 Subject: [PATCH 3/6] add SQL command, simplify subgraph --- codegen/{subgraph => }/chainconfig.go | 2 +- codegen/cmd.go | 6 +- codegen/helpers.go | 179 ++++++++++++++++ codegen/project.go | 202 ++++++++++++++++++ codegen/sql.go | 29 +++ codegen/subgraph.go | 29 +++ codegen/subgraph/helpers.go | 153 ------------- codegen/subgraph/project.go | 143 ------------- codegen/subgraph/subgraph.go | 92 -------- .../templates/triggers/schema.graphql.gotmpl | 11 - .../templates/triggers/src/mappings.ts.gotmpl | 48 ----- codegen/templates/sql/Makefile.gotmpl | 63 ++++++ codegen/templates/sql/README.md.gotmpl | 59 +++++ .../dev-environment/docker-compose.yml.gotmpl | 51 +++++ .../sql/dev-environment/start.sh.gotmpl | 65 ++++++ codegen/templates/sql/run-local.sh.gotmpl | 50 +++++ .../sql/schema.clickhouse.sql.gotmpl | 7 + codegen/templates/sql/schema.sql.gotmpl | 3 + .../sql/substreams.clickhouse.yaml.gotmpl | 12 ++ .../templates/sql/substreams.sql.yaml.gotmpl | 10 + 
codegen/templates/sql/substreams.yaml.gotmpl | 3 + .../templates/triggers/Makefile.gotmpl | 0 .../templates/triggers/README.md | 0 .../templates/triggers/buf.gen.yaml | 0 .../dev-environment/config.toml.gotmpl | 0 .../dev-environment/docker-compose.yml | 0 .../triggers/dev-environment/start.sh | 0 .../templates/triggers/package.json.gotmpl | 2 +- .../templates/triggers/run-local.sh.gotmpl | 0 .../templates/triggers/schema.graphql.gotmpl | 11 + .../templates/triggers/src/mappings.ts.gotmpl | 38 ++++ .../templates/triggers/subgraph.yaml.gotmpl | 0 .../templates/triggers/tsconfig.json | 0 go.mod | 1 + go.sum | 2 + 35 files changed, 820 insertions(+), 451 deletions(-) rename codegen/{subgraph => }/chainconfig.go (99%) create mode 100644 codegen/helpers.go create mode 100644 codegen/project.go create mode 100644 codegen/sql.go create mode 100644 codegen/subgraph.go delete mode 100644 codegen/subgraph/helpers.go delete mode 100644 codegen/subgraph/project.go delete mode 100644 codegen/subgraph/subgraph.go delete mode 100644 codegen/subgraph/templates/triggers/schema.graphql.gotmpl delete mode 100644 codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl create mode 100644 codegen/templates/sql/Makefile.gotmpl create mode 100644 codegen/templates/sql/README.md.gotmpl create mode 100644 codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl create mode 100644 codegen/templates/sql/dev-environment/start.sh.gotmpl create mode 100644 codegen/templates/sql/run-local.sh.gotmpl create mode 100644 codegen/templates/sql/schema.clickhouse.sql.gotmpl create mode 100644 codegen/templates/sql/schema.sql.gotmpl create mode 100644 codegen/templates/sql/substreams.clickhouse.yaml.gotmpl create mode 100644 codegen/templates/sql/substreams.sql.yaml.gotmpl create mode 100644 codegen/templates/sql/substreams.yaml.gotmpl rename codegen/{subgraph => }/templates/triggers/Makefile.gotmpl (100%) rename codegen/{subgraph => }/templates/triggers/README.md (100%) rename 
codegen/{subgraph => }/templates/triggers/buf.gen.yaml (100%) rename codegen/{subgraph => }/templates/triggers/dev-environment/config.toml.gotmpl (100%) rename codegen/{subgraph => }/templates/triggers/dev-environment/docker-compose.yml (100%) rename codegen/{subgraph => }/templates/triggers/dev-environment/start.sh (100%) rename codegen/{subgraph => }/templates/triggers/package.json.gotmpl (90%) rename codegen/{subgraph => }/templates/triggers/run-local.sh.gotmpl (100%) create mode 100644 codegen/templates/triggers/schema.graphql.gotmpl create mode 100644 codegen/templates/triggers/src/mappings.ts.gotmpl rename codegen/{subgraph => }/templates/triggers/subgraph.yaml.gotmpl (100%) rename codegen/{subgraph => }/templates/triggers/tsconfig.json (100%) diff --git a/codegen/subgraph/chainconfig.go b/codegen/chainconfig.go similarity index 99% rename from codegen/subgraph/chainconfig.go rename to codegen/chainconfig.go index fa571be3..4c38eaf3 100644 --- a/codegen/subgraph/chainconfig.go +++ b/codegen/chainconfig.go @@ -1,4 +1,4 @@ -package subgraph +package codegen type ChainConfig struct { ID string // Public diff --git a/codegen/cmd.go b/codegen/cmd.go index 3082bfa7..cac649da 100644 --- a/codegen/cmd.go +++ b/codegen/cmd.go @@ -2,11 +2,13 @@ package codegen import ( "github.com/spf13/cobra" - "github.com/streamingfast/substreams/codegen/subgraph" ) var Cmd = &cobra.Command{Use: "codegen", Short: "Code generator for substreams"} func init() { - Cmd.AddCommand(subgraph.SubgraphCmd) + SubgraphCmd.Flags().Bool("with-dev-env", false, "generate graph node dev environment") + + Cmd.AddCommand(SubgraphCmd) + Cmd.AddCommand(SQLCmd) } diff --git a/codegen/helpers.go b/codegen/helpers.go new file mode 100644 index 00000000..4394dd05 --- /dev/null +++ b/codegen/helpers.go @@ -0,0 +1,179 @@ +package codegen + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/streamingfast/substreams/manifest" + + "github.com/charmbracelet/huh" + + pbsubstreams 
"github.com/streamingfast/substreams/pb/sf/substreams/v1" + "google.golang.org/protobuf/types/descriptorpb" +) + +const outputTypeSQL = "sql" +const outputTypeSubgraph = "subgraph" + +func getModule(pkg *pbsubstreams.Package, moduleName string) (*pbsubstreams.Module, error) { + existingModules := pkg.GetModules().GetModules() + for _, module := range existingModules { + if (module.Name) == moduleName { + return module, nil + } + } + + return nil, fmt.Errorf("module %q does not exists", moduleName) +} + +func searchForMessageTypeIntoPackage(pkg *pbsubstreams.Package, outputType string) (*descriptorpb.DescriptorProto, error) { + sanitizeMessageType := outputType[strings.Index(outputType, ":")+1:] + for _, protoFile := range pkg.ProtoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + if packageName+"."+message.GetName() == sanitizeMessageType { + return message, nil + } + + nestedMessage := checkNestedMessages(message, packageName, sanitizeMessageType) + if nestedMessage != nil { + return nestedMessage, nil + } + } + } + + return nil, fmt.Errorf("message type %q not found in package", sanitizeMessageType) +} + +func checkNestedMessages(message *descriptorpb.DescriptorProto, packageName, messageType string) *descriptorpb.DescriptorProto { + for _, nestedMessage := range message.NestedType { + if packageName+"."+message.GetName()+"."+nestedMessage.GetName() == messageType { + return nestedMessage + } + + checkNestedMessages(nestedMessage, packageName, messageType) + } + + return nil +} + +func getExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[string]*descriptorpb.DescriptorProto { + var protoTypeMapping = map[string]*descriptorpb.DescriptorProto{} + for _, protoFile := range protoFiles { + packageName := protoFile.GetPackage() + for _, message := range protoFile.MessageType { + currentName := "." + packageName + "." 
+ message.GetName() + protoTypeMapping[currentName] = message + processMessage(message, currentName, protoTypeMapping) + } + } + + return protoTypeMapping +} + +func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { + for _, nestedMessage := range message.NestedType { + currentName := "." + parentName + "." + nestedMessage.GetName() + protoTypeMapping[currentName] = nestedMessage + processMessage(nestedMessage, currentName, protoTypeMapping) + } +} + +func buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputType string, withDevEnv bool) error { + reader, err := manifest.NewReader(manifestPath) + if err != nil { + return fmt.Errorf("manifest reader: %w", err) + } + + pkg, _, err := reader.Read() + if err != nil { + return fmt.Errorf("read manifest %q: %w", manifestPath, err) + } + + requestedModule, err := getModule(pkg, moduleName) + if err != nil { + return fmt.Errorf("getting module: %w", err) + } + + if outputType == outputTypeSQL { + if requestedModule.Output.Type != "proto:sf.substreams.sink.database.v1.DatabaseChanges" { + return fmt.Errorf("requested module shoud have proto:sf.substreams.sink.database.v1.DatabaseChanges as output type") + } + } + + if pkg.GetPackageMeta()[0] == nil { + return fmt.Errorf("package meta not found") + } + + projectName := pkg.GetPackageMeta()[0].Name + + messageDescriptor, err := searchForMessageTypeIntoPackage(pkg, requestedModule.Output.Type) + if err != nil { + return fmt.Errorf("searching for message type: %w", err) + } + + protoTypeMapping := getExistingProtoTypes(pkg.ProtoFiles) + project := NewProject(projectName, networkName, requestedModule, messageDescriptor, protoTypeMapping) + + // Create an example entity from the output descriptor + project.BuildExampleEntity() + + projectFiles, err := project.Render(outputType, withDevEnv) + if err != nil { + return fmt.Errorf("rendering project files: %w", err) + } + + 
saveDir, err := createSaveDirForm() + if err != nil { + fmt.Println("creating save directory: %w", err) + } + + err = saveProjectFiles(projectFiles, saveDir) + if err != nil { + fmt.Println("saving project files: %w", err) + } + + return nil +} + +func createSaveDirForm() (string, error) { + saveDir := "output_sps" + if cwd, err := os.Getwd(); err == nil { + saveDir = filepath.Join(cwd, saveDir) + } + + inputField := huh.NewInput().Title("In which directory do you want to generate the project?").Value(&saveDir) + var WITH_ACCESSIBLE = false + + err := huh.NewForm(huh.NewGroup(inputField)).WithTheme(huh.ThemeCharm()).WithAccessible(WITH_ACCESSIBLE).Run() + if err != nil { + return "", fmt.Errorf("failed taking input: %w", err) + } + + return saveDir, nil +} + +func saveProjectFiles(projectFiles map[string][]byte, saveDir string) error { + err := os.MkdirAll(saveDir, 0755) + if err != nil { + return fmt.Errorf("creating directory %s: %w", saveDir, err) + } + + for fileName, fileContent := range projectFiles { + filePath := filepath.Join(saveDir, fileName) + + err := os.MkdirAll(filepath.Dir(filePath), 0755) + if err != nil { + return fmt.Errorf("creating directory %s: %w", filepath.Dir(filePath), err) + } + + err = os.WriteFile(filePath, fileContent, 0644) + if err != nil { + return fmt.Errorf("saving file %s: %w", filePath, err) + } + } + + return nil +} diff --git a/codegen/project.go b/codegen/project.go new file mode 100644 index 00000000..e3b0a7a2 --- /dev/null +++ b/codegen/project.go @@ -0,0 +1,202 @@ +package codegen + +import ( + "bytes" + "embed" + "fmt" + "io/fs" + "strings" + "text/template" + + "github.com/golang-cz/textcase" + + "github.com/bmatcuk/doublestar/v4" + pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" + "google.golang.org/protobuf/types/descriptorpb" +) + +type Project struct { + Name string + Network string + Module *pbsubstreams.Module + OutputDescriptor *descriptorpb.DescriptorProto + protoTypeMapping 
map[string]*descriptorpb.DescriptorProto + ExampleEntity *ExampleEntity +} + +type ExampleEntity struct { + Name string + NameAsProtoField string + NameAsEntity string + ID string +} + +func NewProject(name, network string, module *pbsubstreams.Module, outputDescriptor *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) *Project { + return &Project{ + Network: network, + Name: name, + Module: module, + OutputDescriptor: outputDescriptor, + protoTypeMapping: protoTypeMapping, + } +} + +func (p *Project) BuildExampleEntity() { + for _, field := range p.OutputDescriptor.Field { + if *field.Type == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { + if *field.Label == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { + splitMessagePath := strings.Split(*field.TypeName, ".") + name := splitMessagePath[len(splitMessagePath)-1] + + p.ExampleEntity = &ExampleEntity{ + NameAsProtoField: field.GetName(), + NameAsEntity: "My" + name, + Name: name, + } + + for _, nestedMessageField := range p.protoTypeMapping[*field.TypeName].Field { + switch *nestedMessageField.Type { + case descriptorpb.FieldDescriptorProto_TYPE_STRING, descriptorpb.FieldDescriptorProto_TYPE_INT64, descriptorpb.FieldDescriptorProto_TYPE_UINT64, descriptorpb.FieldDescriptorProto_TYPE_UINT32, descriptorpb.FieldDescriptorProto_TYPE_BYTES: + p.ExampleEntity.ID = textcase.CamelCase(nestedMessageField.GetName()) + default: + continue + } + } + } + } + } + return +} + +func (p *Project) ExampleEntityHasID() bool { + return p.ExampleEntity.ID != "" +} + +func (p *Project) HasExampleEntity() bool { + return p.ExampleEntity != nil + +} + +func (p *Project) SubstreamsKebabName() string { + return strings.ReplaceAll(p.Name, "_", "-") +} + +func (p *Project) GetModuleName() string { + return p.Module.Name +} + +func (p *Project) OutputName() string { + return p.OutputDescriptor.GetName() +} + +func (p *Project) ProtoOutputName() string { + return "proto" + 
p.OutputDescriptor.GetName() +} + +func (p *Project) ProtoOutputNameToSnake() string { + return textcase.SnakeCase("proto" + p.OutputDescriptor.GetName()) +} + +func (p *Project) GetOutputProtoPath() string { + return p.Module.Output.Type[strings.LastIndex(p.Module.Output.Type, ":")+1:] +} + +func (p *Project) GetOutputProtobufPath() string { + protobufPath := strings.ReplaceAll(p.GetOutputProtoPath(), ".", "/") + return protobufPath +} + +func (p *Project) ChainEndpoint() string { return ChainConfigByID[p.Network].FirehoseEndpoint } + +func (p *Project) Render(outputType string, withDevEnv bool) (projectFiles map[string][]byte, err error) { + projectFiles = map[string][]byte{} + + funcMap := template.FuncMap{ + "arr": func(els ...any) []any { + return els + }, + "toLower": strings.ToLower, + } + + tpls, err := ParseFS(funcMap, templatesFS, "**/*.gotmpl") + if err != nil { + return nil, fmt.Errorf("parse templates: %w", err) + } + + var templateFiles map[string]string + switch outputType { + case outputTypeSubgraph: + templateFiles = map[string]string{ + "triggers/Makefile.gotmpl": "Makefile", + "triggers/buf.gen.yaml": "buf.gen.yaml", + "triggers/package.json.gotmpl": "package.json", + "triggers/tsconfig.json": "tsconfig.json", + "triggers/subgraph.yaml.gotmpl": "subgraph.yaml", + "triggers/schema.graphql.gotmpl": "schema.graphql", + "triggers/src/mappings.ts.gotmpl": "src/mappings.ts", + "triggers/run-local.sh.gotmpl": "run-local.sh", + } + + if withDevEnv { + templateFiles["triggers/dev-environment/docker-compose.yml"] = "dev-environment/docker-compose.yml" + templateFiles["triggers/dev-environment/start.sh"] = "dev-environment/start.sh" + templateFiles["triggers/dev-environment/config.toml.gotmpl"] = "dev-environment/config.toml" + } + case outputTypeSQL: + templateFiles = map[string]string{ + "sql/Makefile.gotmpl": "Makefile", + } + if withDevEnv { + templateFiles[""] = "" + } + } + + for templateFile, finalFileName := range templateFiles { + var content 
[]byte + if strings.HasSuffix(templateFile, ".gotmpl") { + buffer := &bytes.Buffer{} + if err := tpls.ExecuteTemplate(buffer, templateFile, p); err != nil { + return nil, fmt.Errorf("embed render entry template %q: %w", templateFile, err) + } + content = buffer.Bytes() + } else { + content, err = templatesFS.ReadFile("templates/" + templateFile) + if err != nil { + return nil, fmt.Errorf("reading %q: %w", templateFile, err) + } + } + + projectFiles[finalFileName] = content + } + + return +} + +//go:embed templates/* +var templatesFS embed.FS + +func ParseFS(myFuncs template.FuncMap, fsys fs.FS, pattern string) (*template.Template, error) { + t := template.New("").Funcs(myFuncs) + filenames, err := doublestar.Glob(fsys, pattern) + if err != nil { + return nil, err + } + if len(filenames) == 0 { + return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) + } + + for _, filename := range filenames { + b, err := fs.ReadFile(fsys, filename) + if err != nil { + return nil, err + } + + name, _ := strings.CutPrefix(filename, "templates/") + + _, err = t.New(name).Parse(string(b)) + if err != nil { + return nil, err + } + } + return t, nil +} diff --git a/codegen/sql.go b/codegen/sql.go new file mode 100644 index 00000000..70a0ad1e --- /dev/null +++ b/codegen/sql.go @@ -0,0 +1,29 @@ +package codegen + +import ( + "fmt" + + "github.com/spf13/cobra" + "github.com/streamingfast/cli/sflags" +) + +var SQLCmd = &cobra.Command{ + Use: "subgraph ", + Short: "Generate subgraph dev environment from substreams manifest", + Args: cobra.ExactArgs(3), + RunE: generateSQLEnv, +} + +func generateSQLEnv(cmd *cobra.Command, args []string) error { + manifestPath := args[0] + moduleName := args[1] + networkName := args[2] + withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") + + err := buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputTypeSubgraph, withDevEnv) + if err != nil { + return fmt.Errorf("building generate command: %w", err) + } + + return 
nil +} diff --git a/codegen/subgraph.go b/codegen/subgraph.go new file mode 100644 index 00000000..c5bf291d --- /dev/null +++ b/codegen/subgraph.go @@ -0,0 +1,29 @@ +package codegen + +import ( + "fmt" + + "github.com/spf13/cobra" + "github.com/streamingfast/cli/sflags" +) + +var SubgraphCmd = &cobra.Command{ + Use: "subgraph ", + Short: "Generate subgraph dev environment from substreams manifest", + Args: cobra.ExactArgs(3), + RunE: generateSubgraphEnv, +} + +func generateSubgraphEnv(cmd *cobra.Command, args []string) error { + manifestPath := args[0] + moduleName := args[1] + networkName := args[2] + withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") + + err := buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputTypeSubgraph, withDevEnv) + if err != nil { + return fmt.Errorf("building generate command: %w", err) + } + + return nil +} diff --git a/codegen/subgraph/helpers.go b/codegen/subgraph/helpers.go deleted file mode 100644 index f4a0fc12..00000000 --- a/codegen/subgraph/helpers.go +++ /dev/null @@ -1,153 +0,0 @@ -package subgraph - -import ( - "fmt" - "strings" - - pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" - "google.golang.org/protobuf/types/descriptorpb" -) - -func GetProjectEntities(outputDescriptor *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) (map[string]Entity, error) { - var outputMap = map[string]Entity{} - err := buildEntitiesMapping(outputDescriptor, outputMap, protoTypeMapping) - if err != nil { - return nil, fmt.Errorf("getting entities: %w", err) - } - - return outputMap, nil -} - -type Entity struct { - NestedEntitiesMapping map[string]string - protoMessage *descriptorpb.DescriptorProto - protoPath string - ProtobufPath string - HasClassicTypes bool - NameAsProto string - NameAsEntity string -} - -func getMessageProtoPath(message *descriptorpb.DescriptorProto, protoTypeMapping map[string]*descriptorpb.DescriptorProto) (string, error) { - for 
protoPath, currentMessage := range protoTypeMapping { - if currentMessage == message { - return protoPath, nil - } - } - - return "", fmt.Errorf("proto path not found for message %q", message.Name) -} - -func buildEntitiesMapping(message *descriptorpb.DescriptorProto, inputMap map[string]Entity, protoTypeMapping map[string]*descriptorpb.DescriptorProto) error { - protoPath, err := getMessageProtoPath(message, protoTypeMapping) - if err != nil { - return fmt.Errorf("getting proto path: %w", err) - } - - protobufPath, _ := strings.CutPrefix(protoPath, ".") - protobufPath = strings.ReplaceAll(protobufPath, ".", "/") - - var entity = Entity{ - protoMessage: message, - protoPath: protoPath, - ProtobufPath: protobufPath, - NestedEntitiesMapping: map[string]string{}, - HasClassicTypes: false, - NameAsProto: "proto" + message.GetName(), - NameAsEntity: "entity" + message.GetName(), - } - - for _, field := range message.GetField() { - switch *field.Type { - case descriptorpb.FieldDescriptorProto_TYPE_MESSAGE: - sanitizeFieldName := (*field.TypeName)[strings.LastIndex(*field.TypeName, ".")+1:] - switch *field.Label { - case descriptorpb.FieldDescriptorProto_LABEL_REPEATED: - entity.NestedEntitiesMapping[field.GetName()] = "[" + sanitizeFieldName + "!]!" - case descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL: - entity.NestedEntitiesMapping[field.GetName()] = sanitizeFieldName + "!" - case descriptorpb.FieldDescriptorProto_LABEL_REQUIRED: - entity.NestedEntitiesMapping[field.GetName()] = sanitizeFieldName + "!" 
- default: - return fmt.Errorf("field label %q not supported", *field.Label) - } - nestedMessage := protoTypeMapping[*field.TypeName] - err := buildEntitiesMapping(nestedMessage, inputMap, protoTypeMapping) - if err != nil { - return fmt.Errorf("getting entity from message: %w", err) - } - case descriptorpb.FieldDescriptorProto_TYPE_ENUM: - return fmt.Errorf("enum type not supported") - default: - entity.HasClassicTypes = true - } - } - - entityName := message.GetName() - inputMap[entityName] = entity - return nil -} - -func GetExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[string]*descriptorpb.DescriptorProto { - var protoTypeMapping = map[string]*descriptorpb.DescriptorProto{} - for _, protoFile := range protoFiles { - packageName := protoFile.GetPackage() - for _, message := range protoFile.MessageType { - currentName := "." + packageName + "." + message.GetName() - protoTypeMapping[currentName] = message - processMessage(message, currentName, protoTypeMapping) - } - } - - return protoTypeMapping -} - -func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { - for _, nestedMessage := range message.NestedType { - currentName := "." + parentName + "." 
+ nestedMessage.GetName() - protoTypeMapping[currentName] = nestedMessage - processMessage(nestedMessage, currentName, protoTypeMapping) - } -} - -func GetModule(pkg *pbsubstreams.Package, moduleName string) (*pbsubstreams.Module, error) { - existingModules := pkg.GetModules().GetModules() - for _, module := range existingModules { - if (module.Name) == moduleName { - return module, nil - } - } - - return nil, fmt.Errorf("module %q does not exists", moduleName) -} - -func SearchForMessageTypeIntoPackage(pkg *pbsubstreams.Package, outputType string) (*descriptorpb.DescriptorProto, error) { - sanitizeMessageType := outputType[strings.Index(outputType, ":")+1:] - for _, protoFile := range pkg.ProtoFiles { - packageName := protoFile.GetPackage() - for _, message := range protoFile.MessageType { - if packageName+"."+message.GetName() == sanitizeMessageType { - return message, nil - } - - nestedMessage := checkNestedMessages(message, packageName, sanitizeMessageType) - if nestedMessage != nil { - return nestedMessage, nil - } - } - } - - return nil, fmt.Errorf("message type %q not found in package", sanitizeMessageType) -} - -func checkNestedMessages(message *descriptorpb.DescriptorProto, packageName, messageType string) *descriptorpb.DescriptorProto { - for _, nestedMessage := range message.NestedType { - if packageName+"."+message.GetName()+"."+nestedMessage.GetName() == messageType { - return nestedMessage - } - - checkNestedMessages(nestedMessage, packageName, messageType) - } - - return nil -} diff --git a/codegen/subgraph/project.go b/codegen/subgraph/project.go deleted file mode 100644 index 34bbde45..00000000 --- a/codegen/subgraph/project.go +++ /dev/null @@ -1,143 +0,0 @@ -package subgraph - -import ( - "bytes" - "embed" - "fmt" - "html/template" - "io/fs" - "strings" - - "github.com/bmatcuk/doublestar/v4" - pbsubstreams "github.com/streamingfast/substreams/pb/sf/substreams/v1" - "google.golang.org/protobuf/types/descriptorpb" -) - -type Project struct { - Name 
string - Network string - Module *pbsubstreams.Module - OutputDescriptor *descriptorpb.DescriptorProto - EntitiesMapping map[string]Entity -} - -func NewProject(name, network string, module *pbsubstreams.Module, outputDescriptor *descriptorpb.DescriptorProto, entitiesMapping map[string]Entity) *Project { - return &Project{ - Network: network, - Name: name, - Module: module, - OutputDescriptor: outputDescriptor, - EntitiesMapping: entitiesMapping, - } -} - -func (p *Project) GetMainEntity() Entity { - return p.EntitiesMapping[p.OutputDescriptor.GetName()] -} - -func (p *Project) GetMainEntityName() string { - return p.OutputDescriptor.GetName() -} - -func (p *Project) SubstreamsKebabName() string { - return strings.ReplaceAll(p.Name, "_", "-") -} - -func (p *Project) GetModuleName() string { - return p.Module.Name -} - -func (p *Project) GetModuleOutputProtoPath() string { - return p.Module.Output.Type[strings.LastIndex(p.Module.Output.Type, ":")+1:] -} - -func (p *Project) GetModuleOutputProtobufPath() string { - return strings.ReplaceAll(p.GetModuleOutputProtoPath(), ".", "/") -} - -func (p *Project) GetEntities() map[string]Entity { - return p.EntitiesMapping -} - -func (p *Project) ChainEndpoint() string { return ChainConfigByID[p.Network].FirehoseEndpoint } - -func (p *Project) Render(withDevEnv bool) (projectFiles map[string][]byte, err error) { - projectFiles = map[string][]byte{} - - funcMap := template.FuncMap{ - "arr": func(els ...any) []any { - return els - }, - } - - tpls, err := ParseFS(funcMap, templatesFS, "**/*.gotmpl") - if err != nil { - return nil, fmt.Errorf("parse templates: %w", err) - } - - templateFiles := map[string]string{ - "triggers/Makefile.gotmpl": "Makefile", - "triggers/buf.gen.yaml": "buf.gen.yaml", - "triggers/package.json.gotmpl": "package.json", - "triggers/tsconfig.json": "tsconfig.json", - "triggers/subgraph.yaml.gotmpl": "subgraph.yaml", - "triggers/schema.graphql.gotmpl": "schema.graphql", - "triggers/src/mappings.ts.gotmpl": 
"src/mappings.ts", - "triggers/run-local.sh.gotmpl": "run-local.sh", - } - - if withDevEnv { - templateFiles["triggers/dev-environment/docker-compose.yml"] = "dev-environment/docker-compose.yml" - templateFiles["triggers/dev-environment/start.sh"] = "dev-environment/start.sh" - templateFiles["triggers/dev-environment/config.toml.gotmpl"] = "dev-environment/config.toml" - } - - for templateFile, finalFileName := range templateFiles { - var content []byte - if strings.HasSuffix(templateFile, ".gotmpl") { - buffer := &bytes.Buffer{} - if err := tpls.ExecuteTemplate(buffer, templateFile, p); err != nil { - return nil, fmt.Errorf("embed render entry template %q: %w", templateFile, err) - } - content = buffer.Bytes() - } else { - content, err = templatesFS.ReadFile("templates/" + templateFile) - if err != nil { - return nil, fmt.Errorf("reading %q: %w", templateFile, err) - } - } - - projectFiles[finalFileName] = content - } - - return -} - -//go:embed templates/* -var templatesFS embed.FS - -func ParseFS(myFuncs template.FuncMap, fsys fs.FS, pattern string) (*template.Template, error) { - t := template.New("").Funcs(myFuncs) - filenames, err := doublestar.Glob(fsys, pattern) - if err != nil { - return nil, err - } - if len(filenames) == 0 { - return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) - } - - for _, filename := range filenames { - b, err := fs.ReadFile(fsys, filename) - if err != nil { - return nil, err - } - - name, _ := strings.CutPrefix(filename, "templates/") - - _, err = t.New(name).Parse(string(b)) - if err != nil { - return nil, err - } - } - return t, nil -} diff --git a/codegen/subgraph/subgraph.go b/codegen/subgraph/subgraph.go deleted file mode 100644 index 09c3a3f2..00000000 --- a/codegen/subgraph/subgraph.go +++ /dev/null @@ -1,92 +0,0 @@ -package subgraph - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/spf13/cobra" - "github.com/streamingfast/cli/sflags" - "github.com/streamingfast/substreams/manifest" -) - -var 
SubgraphCmd = &cobra.Command{ - Use: "subgraph ", - Short: "Generate subgraph dev environment from substreams manifest", - Args: cobra.ExactArgs(3), - RunE: generateSubgraphEnv, -} - -func init() { - SubgraphCmd.Flags().Bool("with-dev-env", false, "generate graph node dev environment") -} - -func generateSubgraphEnv(cmd *cobra.Command, args []string) error { - manifestPath := args[0] - moduleName := args[1] - networkName := args[2] - reader, err := manifest.NewReader(manifestPath) - if err != nil { - return fmt.Errorf("manifest reader: %w", err) - } - - withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") - - pkg, _, err := reader.Read() - if err != nil { - return fmt.Errorf("read manifest %q: %w", manifestPath, err) - } - - requestedModule, err := GetModule(pkg, moduleName) - if err != nil { - return fmt.Errorf("getting module: %w", err) - } - - if pkg.GetPackageMeta()[0] == nil { - return fmt.Errorf("package meta not found") - } - - projectName := pkg.GetPackageMeta()[0].Name - - messageDescriptor, err := SearchForMessageTypeIntoPackage(pkg, requestedModule.Output.Type) - if err != nil { - return fmt.Errorf("searching for message type: %w", err) - } - - protoTypeMapping := GetExistingProtoTypes(pkg.ProtoFiles) - - entitiesMapping, err := GetProjectEntities(messageDescriptor, protoTypeMapping) - if err != nil { - panic(fmt.Errorf("getting entities: %w", err)) - } - - project := NewProject(projectName, networkName, requestedModule, messageDescriptor, entitiesMapping) - - projectFiles, err := project.Render(withDevEnv) - if err != nil { - return fmt.Errorf("rendering project files: %w", err) - } - - saveDir := "/tmp/testSubCmd2/" - - err = os.MkdirAll(saveDir, 0755) - if err != nil { - return fmt.Errorf("creating directory %s: %w", saveDir, err) - } - - for fileName, fileContent := range projectFiles { - filePath := filepath.Join(saveDir, fileName) - - err := os.MkdirAll(filepath.Dir(filePath), 0755) - if err != nil { - return fmt.Errorf("creating directory %s: 
%w", filepath.Dir(filePath), err) - } - - err = os.WriteFile(filePath, fileContent, 0644) - if err != nil { - return fmt.Errorf("saving file %s: %w", filePath, err) - } - } - - return nil -} diff --git a/codegen/subgraph/templates/triggers/schema.graphql.gotmpl b/codegen/subgraph/templates/triggers/schema.graphql.gotmpl deleted file mode 100644 index 63ed1a1c..00000000 --- a/codegen/subgraph/templates/triggers/schema.graphql.gotmpl +++ /dev/null @@ -1,11 +0,0 @@ -{{- range $entityName, $entity := $.GetEntities }} -type {{ $entity.NameAsEntity }} @entity { - id: ID! - {{- range $fieldName, $graphqlType := $entity.NestedEntitiesMapping }} - {{ $fieldName }}: {{ $graphqlType }} - {{- end }} - {{if $entity.HasClassicTypes }} - jsonValue: String! - {{- end -}} -} -{{- end }} \ No newline at end of file diff --git a/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl b/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl deleted file mode 100644 index db39d5c0..00000000 --- a/codegen/subgraph/templates/triggers/src/mappings.ts.gotmpl +++ /dev/null @@ -1,48 +0,0 @@ -import { log } from "@graphprotocol/graph-ts"; -import { Protobuf } from "as-proto/assembly"; -import { JSON } from "assemblyscript-json"; -{{- range $entityName, $entity := $.GetEntities }} -import { {{ $entityName}} as {{$entity.NameAsProto}}} from "./pb/{{ $entity.ProtobufPath }}"; -import { {{ $entity.NameAsEntity }} } from "../generated/schema"; -{{- end -}} - -export function handleTriggers(bytes: Uint8Array): void { - {{ $mainEntity := $.GetMainEntity }} - const $mainEntity.NameAsProto = Protobuf.decode<{{ $mainEntity.NameAsProto}}>( - bytes, - {{ $mainEntity.NameAsProto}}.decode - ); - - let mainId = crypto.keccak256(Bytes.fromUint8Array(bytes)).toHexString(); - - {{ template "entityFromProto" (arr $mainEntity "mainId") }} -} - -function ID(parentID: string, messageName: string): string { - return parentID + "-" + messageName; -} - -function IDWithIndex(parentID: string, messageName: 
string, index: number): string { - return parentID + "-" + messageName + "-" + index.toString(); -} - - -{{define "entityFromProto"}} - {{ $parentEntity := index . 0 }} - {{ $parentId := index . 1 }} - let $parentEntity.NameAsEntity = new {{ $parentEntity.NameAsEntity }}(parentId); - - - for nested entities - let {{ $parentId }} = ID(parentId + "nestedEntityName") - let nestedEntity = new NestedEntity(nestedEntityID) - {{ template "entityFromProto" (arr .nestedEntity nestedEntityID) }} - - //Handle classic fields - -Create entityJsonValue JSON object - -set all classic fields - -$parentEntity.NameAsEntity.jsonValue = entityJsonValue.ToString() - -save - - $parentEntity.NameAsEntity.save() -{{end}} - diff --git a/codegen/templates/sql/Makefile.gotmpl b/codegen/templates/sql/Makefile.gotmpl new file mode 100644 index 00000000..f4a1c63d --- /dev/null +++ b/codegen/templates/sql/Makefile.gotmpl @@ -0,0 +1,63 @@ +LINK := $(shell curl -s https://api.github.com/repos/streamingfast/substreams-sink-sql/releases/latest | awk '/browser_download_url.*$(shell uname -s | tr "[:upper:]" "[:lower:]")_$(shell uname -m)/ {print $$2}' | sed 's/"//g') + +.PHONY: get-sink-sql-binary +get-sink-sql-binary: +{{"\t"}}@echo "Downloading substreams-sink-sql binary..." +{{"\t"}}@mkdir -p bin +{{"\t"}}curl -L $(LINK) | tar zxf - -C bin +{{"\t"}}@rm -f bin/LICENSE +{{"\t"}}@rm -f bin/README.md +{{"\t"}}@echo "Download complete." + +{{- if .FlavorIsPostgresSQL }} +DSN:="postgres://dev-node:insecure-change-me-in-prod@localhost:5432/dev-node?sslmode=disable" +{{- end }} +{{- if .FlavorIsClickHouse }} +DSN:="clickhouse://default:@localhost:9000/default" +{{- end }} + +.PHONY: setup-sink +setup-sink: +{{"\t"}}@echo "Setting up substreams-sink-sql..." +{{"\t"}}./bin/substreams-sink-sql setup $(DSN) ./{{ .KebabName }}-v0.1.0.spkg + +.PHONY: run-sink +run-sink: +{{"\t"}}@echo "Running substreams sink sql..." 
+{{- if .FlavorIsPostgresSQL }} +{{"\t"}}./bin/substreams-sink-sql run $(DSN) ./{{ .KebabName }}-v0.1.0.spkg +{{- end }} +{{- if .FlavorIsClickHouse }} +{{"\t"}}./bin/substreams-sink-sql run $(DSN) ./{{ .KebabName }}-v0.1.0.spkg --undo-buffer-size 12 +{{- end }} + +.PHONY: clean-local-data +clean-local-data: +{{"\t"}}@echo "Cleaning local data..." +{{"\t"}}if [ -d ./data/postgres ]; then +{{"\t"}}rm -rf ./data/postgres +{{"\t"}}fi +{{"\t"}}@echo "Local data cleaned." + +{{- if .FlavorIsPostgresSQL }} +.PHONY: launch-postgresql-db +launch-postgresql-db: +{{"\t"}}@echo "Launching postgresql database..." +{{"\t"}}./dev-environment/start.sh +{{- end -}} + +{{- if .FlavorIsClickHouse }} +.PHONY: launch-clickhouse-db +launch-clickhouse-db: +{{"\t"}}@echo "Launching clickhouse database..." +{{"\t"}}./dev-environment/start.sh +{{- end -}} + +.PHONY: package +package: build +{{- if .FlavorIsPostgresSQL }} +{{"\t"}}substreams pack substreams.sql.yaml +{{- end -}} +{{- if .FlavorIsClickHouse }} +{{"\t"}}substreams pack substreams.clickhouse.yaml +{{- end -}} \ No newline at end of file diff --git a/codegen/templates/sql/README.md.gotmpl b/codegen/templates/sql/README.md.gotmpl new file mode 100644 index 00000000..225a1ea5 --- /dev/null +++ b/codegen/templates/sql/README.md.gotmpl @@ -0,0 +1,59 @@ +# Fill out database using substreams as source + +This repository contains all the keys to launch a local `substreams-sink-sql` instance using a substreams source. +It will allow you to fill out your database with data from the substreams source. 
+ +## Table of Contents + - [Pre-requisites](#pre-requisites) + {{- if .FlavorIsClickHouse}} + - [Launch Clickhouse database](#launch-clickhouse-database) + {{- end}} + {{- if .FlavorIsPostgresSQL}} + - [Launch PostgresSQL database](#launch-postressql-database) + {{- end}} + - [Setup Sink](#setup-sink) + - [Run Sink](#run-sink) + +### Pre-requisites + +- Install the `substreams-sink-sql` binary using the following command: +```bash +make get-sink-sql-binary +``` +> [!NOTE] +> The `substreams-sink-sql` binary is downloaded to the `bin` directory. + +{{- if .FlavorIsClickHouse}} +### Launch Clickhouse database +Once the sink binary is installed, you can launch a local Clickhouse database using the `docker-compose-yml` in the `dev-environment` directory. +To do so, you can use the following make command: +```bash +make launch-clickhouse-db +``` +{{- end}} + +{{- if .FlavorIsPostgresSQL}} +### Launch PostgresSQL database +Once the sink binary is installed, you can launch a local PostgresSQL database using the `docker-compose-yml` in the `dev-environment` directory. +To do so, you can use the following make command: +```bash +make launch-postgresql-db +``` +{{- end}} + +### Setup Sink +After starting your database, you need to setup tour sink with a `DNS` to connect the sink to your database. You need also your build substreams package. +If you run your clickhouse database with the provided `dev-environment`, you can use the following command: + +```bash +make setup-sink +``` + +### Run Sink +Once the sink is setup, you can now run a substreams sink to fill out your database with data based on the substreams source. +Run the following command to start it: +```bash +make run-sink +``` +> [!NOTE] +> When running sink, do not forget to provide a substreams JWT Token or an API key enabling you to use the substreams source. 
diff --git a/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl b/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl new file mode 100644 index 00000000..5b19912b --- /dev/null +++ b/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl @@ -0,0 +1,51 @@ +version: "3" +services: + {{- if .FlavorIsPostgresSQL }} + postgres: + container_name: postgres-ssp + image: postgres:14 + ports: + - "5432:5432" + command: ["postgres", "-cshared_preload_libraries=pg_stat_statements"] + #command: ["postgres", "-cshared_preload_libraries=pg_stat_statements", "-clog_statement=all"] + environment: + POSTGRES_USER: dev-node + POSTGRES_PASSWORD: insecure-change-me-in-prod + POSTGRES_DB: dev-node + POSTGRES_INITDB_ARGS: "-E UTF8 --locale=C" + POSTGRES_HOST_AUTH_METHOD: md5 + volumes: + - ./data/postgres:/var/lib/postgresql/data + healthcheck: + test: ["CMD", "pg_isready"] + interval: 30s + timeout: 10s + retries: 15 + pgweb: + container_name: pgweb-ssp + image: sosedoff/pgweb:0.11.12 + restart: on-failure + ports: + - "8081:8081" + command: ["pgweb", "--bind=0.0.0.0", "--listen=8081", "--binary-codec=hex"] + links: + - postgres:postgres + environment: + - DATABASE_URL=postgres://dev-node:insecure-change-me-in-prod@postgres:5432/dev-node?sslmode=disable + depends_on: + - postgres + {{- end -}} + {{- if .FlavorIsClickHouse }} + database: + container_name: clickhouse-ssp + image: clickhouse/clickhouse-server:23.9 + user: "101:101" + hostname: clickhouse + volumes: + - ./data/clickhouse/clickhouse-server/config.xml:/etc/clickhouse-server/config.d/config.xml + - ./data/clickhouse/clickhouse-server/users.xml:/etc/clickhouse-server/users.d/users.xml + ports: + - "8123:8123" + - "9000:9000" + - "9005:9005" + {{- end -}} diff --git a/codegen/templates/sql/dev-environment/start.sh.gotmpl b/codegen/templates/sql/dev-environment/start.sh.gotmpl new file mode 100644 index 00000000..88729734 --- /dev/null +++ b/codegen/templates/sql/dev-environment/start.sh.gotmpl 
@@ -0,0 +1,65 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +clean= + +main() { + pushd "$ROOT" &> /dev/null + + while getopts "hc" opt; do + case $opt in + h) usage && exit 0;; + c) clean=true;; + \?) usage_error "Invalid option: -$OPTARG";; + esac + done + + shift $((OPTIND-1)) + + if [[ -d "./data" && $clean == true ]]; then + echo "Cleaning data directory" + rm -rf ./data 1> /dev/null + fi + + prepare + + exec docker-compose up +} +prepare() { + {{- if .FlavorIsPostgresSQL }} + if [[ ! -d "./data/postgres" ]]; then + mkdir -p ./data/postgres 1> /dev/null + fi + {{- end }} + {{- if .FlavorIsClickHouse }} + if [[ ! -d "./data/clickhouse" ]]; then + mkdir -p ./data/clickhouse 1> /dev/null + fi + {{- end }} +} + +usage_error() { + message="$1" + exit_code="$2" + + echo "ERROR: $message" + echo "" + usage + exit ${exit_code:-1} +} + +usage() { + echo "usage: up [-c]" + echo "" + echo "Setup required files layout and launch 'docker compose up'" + echo "spinning up all required development dependencies." + echo "" + echo "Options" + echo " -c Clean 'data' directory before launching dependencies" + echo " -h Display help about this script" +} + +main "$@" diff --git a/codegen/templates/sql/run-local.sh.gotmpl b/codegen/templates/sql/run-local.sh.gotmpl new file mode 100644 index 00000000..e4a0cc7d --- /dev/null +++ b/codegen/templates/sql/run-local.sh.gotmpl @@ -0,0 +1,50 @@ +#!/usr/bin/env bash + +set -e + +ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +if [[ -z $SUBSTREAMS_API_TOKEN ]]; then + echo "Please set SUBSTREAMS_API_TOKEN in your environment" + exit 1 +fi + +if [[ -z $DATABASE_DSN ]]; then + DATABASE_DSN={{ if eq $.SqlOutputFlavor "clickhouse" }}"clickhouse://default:@localhost:9000/default"{{ else }}"postgres://dev-node:insecure-change-me-in-prod@localhost:5432/dev-node?sslmode=disable"{{ end }} +fi + +if [ ! 
-f {{ $.KebabName }}-v0.1.0.spkg ]; then + echo "Missing {{ $.KebabName }}-v0.1.0.spkg file. Please build the substreams first. (unzip substreams_src.zip and run 'make build' in the substreams directory, then copy the file here)" + exit 1 +fi + +echo "" +echo "----- Running docker environment -----" +echo "" +sleep 1 +docker compose -f $ROOT/dev-environment/docker-compose.yml up -d --wait + +echo "" +echo "----- Getting a fresh copy of substreams-sink-sql under ./bin -----" +echo "" +make get-sink-sql-binary + +echo "" +echo "----- Set up substreams-sink-sql -----" +echo "" +sleep 1 +make setup-sink + +echo "" +echo "----- Run substreams-sink-sql -----" +{{- if eq $.SqlOutputFlavor "clickhouse" }} +echo "Run the following commands in another terminal" +echo "docker exec -it {docker_id} sh" +echo "clickhouse-client" +echo "select * from events;" +{{ else}} +echo "Navigate to http://localhost:8081 and run SQL queries" +{{- end }} +echo "" +sleep 1 +make run-sink diff --git a/codegen/templates/sql/schema.clickhouse.sql.gotmpl b/codegen/templates/sql/schema.clickhouse.sql.gotmpl new file mode 100644 index 00000000..63956658 --- /dev/null +++ b/codegen/templates/sql/schema.clickhouse.sql.gotmpl @@ -0,0 +1,7 @@ +{{ if $.HasExampleEntity }} +CREATE TABLE IF NOT EXISTS {{ $contract.Identifier }}_{{ $rust.TableChangeEntityName }} ( +) ENGINE = MergeTree PRIMARY KEY ("evt_tx_hash","evt_index"); +{{- else }} +CREATE TABLE IF NOT EXISTS MyEntity ( +) ENGINE = MergeTree PRIMARY KEY ("evt_tx_hash","evt_index"); +{{- end }} \ No newline at end of file diff --git a/codegen/templates/sql/schema.sql.gotmpl b/codegen/templates/sql/schema.sql.gotmpl new file mode 100644 index 00000000..0b2484f5 --- /dev/null +++ b/codegen/templates/sql/schema.sql.gotmpl @@ -0,0 +1,3 @@ +CREATE TABLE IF NOT EXISTS {{ $.ExampleEntity.NameAsEntity }} ( + PRIMARY KEY(evt_tx_hash,evt_index) +); diff --git a/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl 
b/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl new file mode 100644 index 00000000..f4c5e4ca --- /dev/null +++ b/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl @@ -0,0 +1,12 @@ +{{ template "substreams.yaml.gotmpl" . }} + +sink: + module: db_out + type: sf.substreams.sink.sql.v1.Service + config: + schema: "./schema.sql" + engine: clickhouse + postgraphile_frontend: + enabled: false + rest_frontend: + enabled: false diff --git a/codegen/templates/sql/substreams.sql.yaml.gotmpl b/codegen/templates/sql/substreams.sql.yaml.gotmpl new file mode 100644 index 00000000..9b4611ba --- /dev/null +++ b/codegen/templates/sql/substreams.sql.yaml.gotmpl @@ -0,0 +1,10 @@ +{{ template "substreams.yaml.gotmpl" . }} + +sink: + module: db_out + type: sf.substreams.sink.sql.v1.Service + config: + schema: "./schema.sql" + engine: postgres + postgraphile_frontend: + enabled: true diff --git a/codegen/templates/sql/substreams.yaml.gotmpl b/codegen/templates/sql/substreams.yaml.gotmpl new file mode 100644 index 00000000..2d2097eb --- /dev/null +++ b/codegen/templates/sql/substreams.yaml.gotmpl @@ -0,0 +1,3 @@ +specVersion: v0.1.0 +package: + name: {{ .ProjectName }} diff --git a/codegen/subgraph/templates/triggers/Makefile.gotmpl b/codegen/templates/triggers/Makefile.gotmpl similarity index 100% rename from codegen/subgraph/templates/triggers/Makefile.gotmpl rename to codegen/templates/triggers/Makefile.gotmpl diff --git a/codegen/subgraph/templates/triggers/README.md b/codegen/templates/triggers/README.md similarity index 100% rename from codegen/subgraph/templates/triggers/README.md rename to codegen/templates/triggers/README.md diff --git a/codegen/subgraph/templates/triggers/buf.gen.yaml b/codegen/templates/triggers/buf.gen.yaml similarity index 100% rename from codegen/subgraph/templates/triggers/buf.gen.yaml rename to codegen/templates/triggers/buf.gen.yaml diff --git a/codegen/subgraph/templates/triggers/dev-environment/config.toml.gotmpl 
b/codegen/templates/triggers/dev-environment/config.toml.gotmpl similarity index 100% rename from codegen/subgraph/templates/triggers/dev-environment/config.toml.gotmpl rename to codegen/templates/triggers/dev-environment/config.toml.gotmpl diff --git a/codegen/subgraph/templates/triggers/dev-environment/docker-compose.yml b/codegen/templates/triggers/dev-environment/docker-compose.yml similarity index 100% rename from codegen/subgraph/templates/triggers/dev-environment/docker-compose.yml rename to codegen/templates/triggers/dev-environment/docker-compose.yml diff --git a/codegen/subgraph/templates/triggers/dev-environment/start.sh b/codegen/templates/triggers/dev-environment/start.sh similarity index 100% rename from codegen/subgraph/templates/triggers/dev-environment/start.sh rename to codegen/templates/triggers/dev-environment/start.sh diff --git a/codegen/subgraph/templates/triggers/package.json.gotmpl b/codegen/templates/triggers/package.json.gotmpl similarity index 90% rename from codegen/subgraph/templates/triggers/package.json.gotmpl rename to codegen/templates/triggers/package.json.gotmpl index 48fc93cf..4281dbda 100644 --- a/codegen/subgraph/templates/triggers/package.json.gotmpl +++ b/codegen/templates/triggers/package.json.gotmpl @@ -5,7 +5,7 @@ "codegen": "graph codegen", "build": "graph build", "publish": "graph publish", - "generate": "buf generate --type=\"{{ $.GetModuleOutputProtoPath}}\" {{ $.SubstreamsKebabName }}-v0.1.0.spkg#format=bin", + "generate": "buf generate --type=\"{{ $.GetOutputProtoPath}}\" {{ $.SubstreamsKebabName }}-v0.1.0.spkg#format=bin", "deploy": "graph deploy --node https://api.studio.thegraph.com/deploy/ {{ .Name }}", "create-local": "graph create --node http://localhost:8020/ {{ .Name }}", "remove-local": "graph remove --node http://localhost:8020/ {{ .Name }}", diff --git a/codegen/subgraph/templates/triggers/run-local.sh.gotmpl b/codegen/templates/triggers/run-local.sh.gotmpl similarity index 100% rename from 
codegen/subgraph/templates/triggers/run-local.sh.gotmpl rename to codegen/templates/triggers/run-local.sh.gotmpl diff --git a/codegen/templates/triggers/schema.graphql.gotmpl b/codegen/templates/triggers/schema.graphql.gotmpl new file mode 100644 index 00000000..9284c383 --- /dev/null +++ b/codegen/templates/triggers/schema.graphql.gotmpl @@ -0,0 +1,11 @@ +{{if $.HasExampleEntity }} +type {{ $.ExampleEntity.NameAsEntity }} @entity { + id: ID! + count: Int! +} +{{ else }} +type MyEntity @entity { + id: ID! + count: Int! +} +{{- end }} \ No newline at end of file diff --git a/codegen/templates/triggers/src/mappings.ts.gotmpl b/codegen/templates/triggers/src/mappings.ts.gotmpl new file mode 100644 index 00000000..17fea31f --- /dev/null +++ b/codegen/templates/triggers/src/mappings.ts.gotmpl @@ -0,0 +1,38 @@ +import { Protobuf } from "as-proto/assembly"; +import { {{ $.OutputName}} as {{ $.ProtoOutputName }} } from "./pb/{{ $.GetOutputProtobufPath }}"; +{{- if $.HasExampleEntity }} +import { {{ $.ExampleEntity.NameAsEntity }} } from "../generated/schema"; +{{- else }} +import { MyEntity } from "../generated/schema"; +{{- end }} + +export function handleTriggers(bytes: Uint8Array): void { + // Decode the protobuf message received from the {{ $.GetModuleName }} substreams module + const {{ $.ProtoOutputNameToSnake }} = Protobuf.decode<{{ $.ProtoOutputName }}>(bytes, {{ $.ProtoOutputName}}.decode); + {{ if $.HasExampleEntity }} + for (let i=0; i<{{$.ProtoOutputNameToSnake}}.{{ $.ExampleEntity.NameAsProtoField }}.length; i++) { + const {{ toLower $.ExampleEntity.Name }} = {{ $.ProtoOutputNameToSnake }}.{{ $.ExampleEntity.NameAsProtoField }}[i]; + {{ if $.ExampleEntityHasID }} + //Create a new event from {{ $.ExampleEntity.ID}} as ID + const myEntity = new {{ $.ExampleEntity.NameAsEntity }}({{ toLower $.ExampleEntity.Name }}.{{ $.ExampleEntity.ID + i}}); + {{ else }} + //No ID field was found for {{ $.ExampleEntity.Name }}, we are using index as ID + const myEntity = new {{ 
$.ExampleEntity.NameAsEntity }}(i); + {{- end }} + myEntity.save(); + } + + {{- else}} + if (MyEntity.load("1") == null) { + const myEntity = new MyEntity(); + myEntity.count = 1; + myEntity.save(); + } else { + let myEntity := MyEntity.load("1") + myEntity.count += 1 + myEntity.save() + } + {{- end }} +} + + diff --git a/codegen/subgraph/templates/triggers/subgraph.yaml.gotmpl b/codegen/templates/triggers/subgraph.yaml.gotmpl similarity index 100% rename from codegen/subgraph/templates/triggers/subgraph.yaml.gotmpl rename to codegen/templates/triggers/subgraph.yaml.gotmpl diff --git a/codegen/subgraph/templates/triggers/tsconfig.json b/codegen/templates/triggers/tsconfig.json similarity index 100% rename from codegen/subgraph/templates/triggers/tsconfig.json rename to codegen/templates/triggers/tsconfig.json diff --git a/go.mod b/go.mod index 4f224643..669cc866 100644 --- a/go.mod +++ b/go.mod @@ -42,6 +42,7 @@ require ( github.com/charmbracelet/lipgloss v0.11.0 github.com/docker/cli v24.0.6+incompatible github.com/dustin/go-humanize v1.0.1 + github.com/golang-cz/textcase v1.2.1 github.com/google/uuid v1.6.0 github.com/hashicorp/go-multierror v1.1.1 github.com/ipfs/go-ipfs-api v0.6.0 diff --git a/go.sum b/go.sum index c0b7e5de..0128050b 100644 --- a/go.sum +++ b/go.sum @@ -261,6 +261,8 @@ github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/me github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-cz/textcase v1.2.1 h1:0xRtKo+abtJojre5ONjuMzyg9fSfiKBj5bWZ6fpTYxI= +github.com/golang-cz/textcase v1.2.1/go.mod h1:aWsQknYwxtTS2zSCrGGoRIsxmzjsHomRqLeMeVb+SKU= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache 
v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= From 2f235fc7026e82317568b08e865ecaca06e26270 Mon Sep 17 00:00:00 2001 From: arnaudberger Date: Fri, 26 Jul 2024 11:47:30 -0400 Subject: [PATCH 4/6] add some improvement --- codegen/helpers.go | 13 ++-- codegen/project.go | 22 +++++-- codegen/sql.go | 5 +- codegen/subgraph.go | 20 ++++-- codegen/templates/sql/Makefile.gotmpl | 63 ------------------ codegen/templates/sql/README.md.gotmpl | 59 ----------------- .../dev-environment/docker-compose.yml.gotmpl | 51 --------------- .../sql/dev-environment/start.sh.gotmpl | 65 ------------------- codegen/templates/sql/run-local.sh.gotmpl | 50 -------------- .../sql/schema.clickhouse.sql.gotmpl | 7 -- codegen/templates/sql/schema.sql.gotmpl | 3 - .../sql/substreams.clickhouse.yaml.gotmpl | 12 ---- .../templates/sql/substreams.sql.yaml.gotmpl | 10 --- codegen/templates/sql/substreams.yaml.gotmpl | 3 - codegen/templates/triggers/Makefile.gotmpl | 1 - .../templates/triggers/src/mappings.ts.gotmpl | 23 ++++--- .../templates/triggers/subgraph.yaml.gotmpl | 4 +- manifest/manifest.go | 15 ++--- 18 files changed, 63 insertions(+), 363 deletions(-) delete mode 100644 codegen/templates/sql/Makefile.gotmpl delete mode 100644 codegen/templates/sql/README.md.gotmpl delete mode 100644 codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl delete mode 100644 codegen/templates/sql/dev-environment/start.sh.gotmpl delete mode 100644 codegen/templates/sql/run-local.sh.gotmpl delete mode 100644 codegen/templates/sql/schema.clickhouse.sql.gotmpl delete mode 100644 codegen/templates/sql/schema.sql.gotmpl delete mode 100644 codegen/templates/sql/substreams.clickhouse.yaml.gotmpl delete mode 100644 codegen/templates/sql/substreams.sql.yaml.gotmpl delete mode 100644 codegen/templates/sql/substreams.yaml.gotmpl delete mode 100644 
codegen/templates/triggers/Makefile.gotmpl diff --git a/codegen/helpers.go b/codegen/helpers.go index 4394dd05..f76d1f3e 100644 --- a/codegen/helpers.go +++ b/codegen/helpers.go @@ -75,13 +75,13 @@ func getExistingProtoTypes(protoFiles []*descriptorpb.FileDescriptorProto) map[s func processMessage(message *descriptorpb.DescriptorProto, parentName string, protoTypeMapping map[string]*descriptorpb.DescriptorProto) { for _, nestedMessage := range message.NestedType { - currentName := "." + parentName + "." + nestedMessage.GetName() + currentName := parentName + "." + nestedMessage.GetName() protoTypeMapping[currentName] = nestedMessage processMessage(nestedMessage, currentName, protoTypeMapping) } } -func buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputType string, withDevEnv bool) error { +func buildGenerateCommandFromArgs(manifestPath, moduleName, outputType string, withDevEnv bool) error { reader, err := manifest.NewReader(manifestPath) if err != nil { return fmt.Errorf("manifest reader: %w", err) @@ -115,7 +115,12 @@ func buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputT } protoTypeMapping := getExistingProtoTypes(pkg.ProtoFiles) - project := NewProject(projectName, networkName, requestedModule, messageDescriptor, protoTypeMapping) + + if pkg.Network == "" { + return fmt.Errorf("network not found in your manifest file") + } + + project := NewProject(projectName, pkg.Network, requestedModule, messageDescriptor, protoTypeMapping) // Create an example entity from the output descriptor project.BuildExampleEntity() @@ -139,7 +144,7 @@ func buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputT } func createSaveDirForm() (string, error) { - saveDir := "output_sps" + saveDir := "subgraph" if cwd, err := os.Getwd(); err == nil { saveDir = filepath.Join(cwd, saveDir) } diff --git a/codegen/project.go b/codegen/project.go index e3b0a7a2..7cdfe457 100644 --- a/codegen/project.go +++ b/codegen/project.go 
@@ -41,7 +41,7 @@ func NewProject(name, network string, module *pbsubstreams.Module, outputDescrip } } -func (p *Project) BuildExampleEntity() { +func (p *Project) BuildExampleEntity() error { for _, field := range p.OutputDescriptor.Field { if *field.Type == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE { if *field.Label == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { @@ -49,11 +49,15 @@ func (p *Project) BuildExampleEntity() { name := splitMessagePath[len(splitMessagePath)-1] p.ExampleEntity = &ExampleEntity{ - NameAsProtoField: field.GetName(), + NameAsProtoField: textcase.CamelCase(field.GetName()), NameAsEntity: "My" + name, Name: name, } + if p.protoTypeMapping[*field.TypeName] == nil { + return fmt.Errorf("nested message type: %q not found", *field.TypeName) + } + for _, nestedMessageField := range p.protoTypeMapping[*field.TypeName].Field { switch *nestedMessageField.Type { case descriptorpb.FieldDescriptorProto_TYPE_STRING, descriptorpb.FieldDescriptorProto_TYPE_INT64, descriptorpb.FieldDescriptorProto_TYPE_UINT64, descriptorpb.FieldDescriptorProto_TYPE_UINT32, descriptorpb.FieldDescriptorProto_TYPE_BYTES: @@ -65,7 +69,7 @@ func (p *Project) BuildExampleEntity() { } } } - return + return nil } func (p *Project) ExampleEntityHasID() bool { @@ -106,7 +110,12 @@ func (p *Project) GetOutputProtobufPath() string { return protobufPath } -func (p *Project) ChainEndpoint() string { return ChainConfigByID[p.Network].FirehoseEndpoint } +func (p *Project) ChainEndpoint() (string, error) { + if ChainConfigByID[p.Network] == nil { + return "", fmt.Errorf("network %q not found", p.Network) + } + return ChainConfigByID[p.Network].FirehoseEndpoint, nil +} func (p *Project) Render(outputType string, withDevEnv bool) (projectFiles map[string][]byte, err error) { projectFiles = map[string][]byte{} @@ -115,7 +124,9 @@ func (p *Project) Render(outputType string, withDevEnv bool) (projectFiles map[s "arr": func(els ...any) []any { return els }, - "toLower": 
strings.ToLower, + "toLower": strings.ToLower, + "toCamelCase": textcase.CamelCase, + "toKebabCase": textcase.KebabCase, } tpls, err := ParseFS(funcMap, templatesFS, "**/*.gotmpl") @@ -127,7 +138,6 @@ func (p *Project) Render(outputType string, withDevEnv bool) (projectFiles map[s switch outputType { case outputTypeSubgraph: templateFiles = map[string]string{ - "triggers/Makefile.gotmpl": "Makefile", "triggers/buf.gen.yaml": "buf.gen.yaml", "triggers/package.json.gotmpl": "package.json", "triggers/tsconfig.json": "tsconfig.json", diff --git a/codegen/sql.go b/codegen/sql.go index 70a0ad1e..c2355c81 100644 --- a/codegen/sql.go +++ b/codegen/sql.go @@ -8,7 +8,7 @@ import ( ) var SQLCmd = &cobra.Command{ - Use: "subgraph ", + Use: "subgraph ", Short: "Generate subgraph dev environment from substreams manifest", Args: cobra.ExactArgs(3), RunE: generateSQLEnv, @@ -17,10 +17,9 @@ var SQLCmd = &cobra.Command{ func generateSQLEnv(cmd *cobra.Command, args []string) error { manifestPath := args[0] moduleName := args[1] - networkName := args[2] withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") - err := buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputTypeSubgraph, withDevEnv) + err := buildGenerateCommandFromArgs(manifestPath, moduleName, outputTypeSubgraph, withDevEnv) if err != nil { return fmt.Errorf("building generate command: %w", err) } diff --git a/codegen/subgraph.go b/codegen/subgraph.go index c5bf291d..8cb19acf 100644 --- a/codegen/subgraph.go +++ b/codegen/subgraph.go @@ -8,19 +8,27 @@ import ( ) var SubgraphCmd = &cobra.Command{ - Use: "subgraph ", + Use: "subgraph [] ", Short: "Generate subgraph dev environment from substreams manifest", - Args: cobra.ExactArgs(3), + Args: cobra.RangeArgs(1, 2), RunE: generateSubgraphEnv, } func generateSubgraphEnv(cmd *cobra.Command, args []string) error { - manifestPath := args[0] - moduleName := args[1] - networkName := args[2] + manifestPath := "" + moduleName := "" + if len(args) == 2 { + 
manifestPath = args[0] + moduleName = args[1] + } + + if len(args) == 1 { + moduleName = args[0] + } + withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") - err := buildGenerateCommandFromArgs(manifestPath, moduleName, networkName, outputTypeSubgraph, withDevEnv) + err := buildGenerateCommandFromArgs(manifestPath, moduleName, outputTypeSubgraph, withDevEnv) if err != nil { return fmt.Errorf("building generate command: %w", err) } diff --git a/codegen/templates/sql/Makefile.gotmpl b/codegen/templates/sql/Makefile.gotmpl deleted file mode 100644 index f4a1c63d..00000000 --- a/codegen/templates/sql/Makefile.gotmpl +++ /dev/null @@ -1,63 +0,0 @@ -LINK := $(shell curl -s https://api.github.com/repos/streamingfast/substreams-sink-sql/releases/latest | awk '/browser_download_url.*$(shell uname -s | tr "[:upper:]" "[:lower:]")_$(shell uname -m)/ {print $$2}' | sed 's/"//g') - -.PHONY: get-sink-sql-binary -get-sink-sql-binary: -{{"\t"}}@echo "Downloading substreams-sink-sql binary..." -{{"\t"}}@mkdir -p bin -{{"\t"}}curl -L $(LINK) | tar zxf - -C bin -{{"\t"}}@rm -f bin/LICENSE -{{"\t"}}@rm -f bin/README.md -{{"\t"}}@echo "Download complete." - -{{- if .FlavorIsPostgresSQL }} -DSN:="postgres://dev-node:insecure-change-me-in-prod@localhost:5432/dev-node?sslmode=disable" -{{- end }} -{{- if .FlavorIsClickHouse }} -DSN:="clickhouse://default:@localhost:9000/default" -{{- end }} - -.PHONY: setup-sink -setup-sink: -{{"\t"}}@echo "Setting up substreams-sink-sql..." -{{"\t"}}./bin/substreams-sink-sql setup $(DSN) ./{{ .KebabName }}-v0.1.0.spkg - -.PHONY: run-sink -run-sink: -{{"\t"}}@echo "Running substreams sink sql..." -{{- if .FlavorIsPostgresSQL }} -{{"\t"}}./bin/substreams-sink-sql run $(DSN) ./{{ .KebabName }}-v0.1.0.spkg -{{- end }} -{{- if .FlavorIsClickHouse }} -{{"\t"}}./bin/substreams-sink-sql run $(DSN) ./{{ .KebabName }}-v0.1.0.spkg --undo-buffer-size 12 -{{- end }} - -.PHONY: clean-local-data -clean-local-data: -{{"\t"}}@echo "Cleaning local data..." 
-{{"\t"}}if [ -d ./data/postgres ]; then -{{"\t"}}rm -rf ./data/postgres -{{"\t"}}fi -{{"\t"}}@echo "Local data cleaned." - -{{- if .FlavorIsPostgresSQL }} -.PHONY: launch-postgresql-db -launch-postgresql-db: -{{"\t"}}@echo "Launching postgresql database..." -{{"\t"}}./dev-environment/start.sh -{{- end -}} - -{{- if .FlavorIsClickHouse }} -.PHONY: launch-clickhouse-db -launch-clickhouse-db: -{{"\t"}}@echo "Launching clickhouse database..." -{{"\t"}}./dev-environment/start.sh -{{- end -}} - -.PHONY: package -package: build -{{- if .FlavorIsPostgresSQL }} -{{"\t"}}substreams pack substreams.sql.yaml -{{- end -}} -{{- if .FlavorIsClickHouse }} -{{"\t"}}substreams pack substreams.clickhouse.yaml -{{- end -}} \ No newline at end of file diff --git a/codegen/templates/sql/README.md.gotmpl b/codegen/templates/sql/README.md.gotmpl deleted file mode 100644 index 225a1ea5..00000000 --- a/codegen/templates/sql/README.md.gotmpl +++ /dev/null @@ -1,59 +0,0 @@ -# Fill out database using substreams as source - -This repository contains all the keys to launch a local `substreams-sink-sql` instance using a substreams source. -It will allow you to fill out your database with data from the substreams source. - -## Table of Contents - - [Pre-requisites](#pre-requisites) - {{- if .FlavorIsClickHouse}} - - [Launch Clickhouse database](#launch-clickhouse-database) - {{- end}} - {{- if .FlavorIsPostgresSQL}} - - [Launch PostgresSQL database](#launch-postressql-database) - {{- end}} - - [Setup Sink](#setup-sink) - - [Run Sink](#run-sink) - -### Pre-requisites - -- Install the `substreams-sink-sql` binary using the following command: -```bash -make get-sink-sql-binary -``` -> [!NOTE] -> The `substreams-sink-sql` binary is downloaded to the `bin` directory. - -{{- if .FlavorIsClickHouse}} -### Launch Clickhouse database -Once the sink binary is installed, you can launch a local Clickhouse database using the `docker-compose-yml` in the `dev-environment` directory. 
-To do so, you can use the following make command: -```bash -make launch-clickhouse-db -``` -{{- end}} - -{{- if .FlavorIsPostgresSQL}} -### Launch PostgresSQL database -Once the sink binary is installed, you can launch a local PostgresSQL database using the `docker-compose-yml` in the `dev-environment` directory. -To do so, you can use the following make command: -```bash -make launch-postgresql-db -``` -{{- end}} - -### Setup Sink -After starting your database, you need to setup tour sink with a `DNS` to connect the sink to your database. You need also your build substreams package. -If you run your clickhouse database with the provided `dev-environment`, you can use the following command: - -```bash -make setup-sink -``` - -### Run Sink -Once the sink is setup, you can now run a substreams sink to fill out your database with data based on the substreams source. -Run the following command to start it: -```bash -make run-sink -``` -> [!NOTE] -> When running sink, do not forget to provide a substreams JWT Token or an API key enabling you to use the substreams source. 
diff --git a/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl b/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl deleted file mode 100644 index 5b19912b..00000000 --- a/codegen/templates/sql/dev-environment/docker-compose.yml.gotmpl +++ /dev/null @@ -1,51 +0,0 @@ -version: "3" -services: - {{- if .FlavorIsPostgresSQL }} - postgres: - container_name: postgres-ssp - image: postgres:14 - ports: - - "5432:5432" - command: ["postgres", "-cshared_preload_libraries=pg_stat_statements"] - #command: ["postgres", "-cshared_preload_libraries=pg_stat_statements", "-clog_statement=all"] - environment: - POSTGRES_USER: dev-node - POSTGRES_PASSWORD: insecure-change-me-in-prod - POSTGRES_DB: dev-node - POSTGRES_INITDB_ARGS: "-E UTF8 --locale=C" - POSTGRES_HOST_AUTH_METHOD: md5 - volumes: - - ./data/postgres:/var/lib/postgresql/data - healthcheck: - test: ["CMD", "pg_isready"] - interval: 30s - timeout: 10s - retries: 15 - pgweb: - container_name: pgweb-ssp - image: sosedoff/pgweb:0.11.12 - restart: on-failure - ports: - - "8081:8081" - command: ["pgweb", "--bind=0.0.0.0", "--listen=8081", "--binary-codec=hex"] - links: - - postgres:postgres - environment: - - DATABASE_URL=postgres://dev-node:insecure-change-me-in-prod@postgres:5432/dev-node?sslmode=disable - depends_on: - - postgres - {{- end -}} - {{- if .FlavorIsClickHouse }} - database: - container_name: clickhouse-ssp - image: clickhouse/clickhouse-server:23.9 - user: "101:101" - hostname: clickhouse - volumes: - - ./data/clickhouse/clickhouse-server/config.xml:/etc/clickhouse-server/config.d/config.xml - - ./data/clickhouse/clickhouse-server/users.xml:/etc/clickhouse-server/users.d/users.xml - ports: - - "8123:8123" - - "9000:9000" - - "9005:9005" - {{- end -}} diff --git a/codegen/templates/sql/dev-environment/start.sh.gotmpl b/codegen/templates/sql/dev-environment/start.sh.gotmpl deleted file mode 100644 index 88729734..00000000 --- a/codegen/templates/sql/dev-environment/start.sh.gotmpl +++ 
/dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env bash - -set -e - -ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -clean= - -main() { - pushd "$ROOT" &> /dev/null - - while getopts "hc" opt; do - case $opt in - h) usage && exit 0;; - c) clean=true;; - \?) usage_error "Invalid option: -$OPTARG";; - esac - done - - shift $((OPTIND-1)) - - if [[ -d "./data" && $clean == true ]]; then - echo "Cleaning data directory" - rm -rf ./data 1> /dev/null - fi - - prepare - - exec docker-compose up -} -prepare() { - {{- if .FlavorIsPostgresSQL }} - if [[ ! -d "./data/postgres" ]]; then - mkdir -p ./data/postgres 1> /dev/null - fi - {{- end }} - {{- if .FlavorIsClickHouse }} - if [[ ! -d "./data/clickhouse" ]]; then - mkdir -p ./data/clickhouse 1> /dev/null - fi - {{- end }} -} - -usage_error() { - message="$1" - exit_code="$2" - - echo "ERROR: $message" - echo "" - usage - exit ${exit_code:-1} -} - -usage() { - echo "usage: up [-c]" - echo "" - echo "Setup required files layout and launch 'docker compose up'" - echo "spinning up all required development dependencies." - echo "" - echo "Options" - echo " -c Clean 'data' directory before launching dependencies" - echo " -h Display help about this script" -} - -main "$@" diff --git a/codegen/templates/sql/run-local.sh.gotmpl b/codegen/templates/sql/run-local.sh.gotmpl deleted file mode 100644 index e4a0cc7d..00000000 --- a/codegen/templates/sql/run-local.sh.gotmpl +++ /dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env bash - -set -e - -ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -if [[ -z $SUBSTREAMS_API_TOKEN ]]; then - echo "Please set SUBSTREAMS_API_TOKEN in your environment" - exit 1 -fi - -if [[ -z $DATABASE_DSN ]]; then - DATABASE_DSN={{ if eq $.SqlOutputFlavor "clickhouse" }}"clickhouse://default:@localhost:9000/default"{{ else }}"postgres://dev-node:insecure-change-me-in-prod@localhost:5432/dev-node?sslmode=disable"{{ end }} -fi - -if [ ! 
-f {{ $.KebabName }}-v0.1.0.spkg ]; then - echo "Missing {{ $.KebabName }}-v0.1.0.spkg file. Please build the substreams first. (unzip substreams_src.zip and run 'make build' in the substreams directory, then copy the file here)" - exit 1 -fi - -echo "" -echo "----- Running docker environment -----" -echo "" -sleep 1 -docker compose -f $ROOT/dev-environment/docker-compose.yml up -d --wait - -echo "" -echo "----- Getting a fresh copy of substreams-sink-sql under ./bin -----" -echo "" -make get-sink-sql-binary - -echo "" -echo "----- Set up substreams-sink-sql -----" -echo "" -sleep 1 -make setup-sink - -echo "" -echo "----- Run substreams-sink-sql -----" -{{- if eq $.SqlOutputFlavor "clickhouse" }} -echo "Run the following commands in another terminal" -echo "docker exec -it {docker_id} sh" -echo "clickhouse-client" -echo "select * from events;" -{{ else}} -echo "Navigate to http://localhost:8081 and run SQL queries" -{{- end }} -echo "" -sleep 1 -make run-sink diff --git a/codegen/templates/sql/schema.clickhouse.sql.gotmpl b/codegen/templates/sql/schema.clickhouse.sql.gotmpl deleted file mode 100644 index 63956658..00000000 --- a/codegen/templates/sql/schema.clickhouse.sql.gotmpl +++ /dev/null @@ -1,7 +0,0 @@ -{{ if $.HasExampleEntity }} -CREATE TABLE IF NOT EXISTS {{ $contract.Identifier }}_{{ $rust.TableChangeEntityName }} ( -) ENGINE = MergeTree PRIMARY KEY ("evt_tx_hash","evt_index"); -{{- else }} -CREATE TABLE IF NOT EXISTS MyEntity ( -) ENGINE = MergeTree PRIMARY KEY ("evt_tx_hash","evt_index"); -{{- end }} \ No newline at end of file diff --git a/codegen/templates/sql/schema.sql.gotmpl b/codegen/templates/sql/schema.sql.gotmpl deleted file mode 100644 index 0b2484f5..00000000 --- a/codegen/templates/sql/schema.sql.gotmpl +++ /dev/null @@ -1,3 +0,0 @@ -CREATE TABLE IF NOT EXISTS {{ $.ExampleEntity.NameAsEntity }} ( - PRIMARY KEY(evt_tx_hash,evt_index) -); diff --git a/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl 
b/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl deleted file mode 100644 index f4c5e4ca..00000000 --- a/codegen/templates/sql/substreams.clickhouse.yaml.gotmpl +++ /dev/null @@ -1,12 +0,0 @@ -{{ template "substreams.yaml.gotmpl" . }} - -sink: - module: db_out - type: sf.substreams.sink.sql.v1.Service - config: - schema: "./schema.sql" - engine: clickhouse - postgraphile_frontend: - enabled: false - rest_frontend: - enabled: false diff --git a/codegen/templates/sql/substreams.sql.yaml.gotmpl b/codegen/templates/sql/substreams.sql.yaml.gotmpl deleted file mode 100644 index 9b4611ba..00000000 --- a/codegen/templates/sql/substreams.sql.yaml.gotmpl +++ /dev/null @@ -1,10 +0,0 @@ -{{ template "substreams.yaml.gotmpl" . }} - -sink: - module: db_out - type: sf.substreams.sink.sql.v1.Service - config: - schema: "./schema.sql" - engine: postgres - postgraphile_frontend: - enabled: true diff --git a/codegen/templates/sql/substreams.yaml.gotmpl b/codegen/templates/sql/substreams.yaml.gotmpl deleted file mode 100644 index 2d2097eb..00000000 --- a/codegen/templates/sql/substreams.yaml.gotmpl +++ /dev/null @@ -1,3 +0,0 @@ -specVersion: v0.1.0 -package: - name: {{ .ProjectName }} diff --git a/codegen/templates/triggers/Makefile.gotmpl b/codegen/templates/triggers/Makefile.gotmpl deleted file mode 100644 index 96efb8bb..00000000 --- a/codegen/templates/triggers/Makefile.gotmpl +++ /dev/null @@ -1 +0,0 @@ -//Not implemented yet \ No newline at end of file diff --git a/codegen/templates/triggers/src/mappings.ts.gotmpl b/codegen/templates/triggers/src/mappings.ts.gotmpl index 17fea31f..7d49b409 100644 --- a/codegen/templates/triggers/src/mappings.ts.gotmpl +++ b/codegen/templates/triggers/src/mappings.ts.gotmpl @@ -13,24 +13,27 @@ export function handleTriggers(bytes: Uint8Array): void { for (let i=0; i<{{$.ProtoOutputNameToSnake}}.{{ $.ExampleEntity.NameAsProtoField }}.length; i++) { const {{ toLower $.ExampleEntity.Name }} = {{ $.ProtoOutputNameToSnake }}.{{ 
$.ExampleEntity.NameAsProtoField }}[i]; {{ if $.ExampleEntityHasID }} - //Create a new event from {{ $.ExampleEntity.ID}} as ID - const myEntity = new {{ $.ExampleEntity.NameAsEntity }}({{ toLower $.ExampleEntity.Name }}.{{ $.ExampleEntity.ID + i}}); + //Create a new {{ toLower $.ExampleEntity.Name }} using {{ $.ExampleEntity.ID}} as ID + let my_entity_id: string = ({{ toLower $.ExampleEntity.Name }}.{{$.ExampleEntity.ID }} + i).toString(); + const my_entity = new {{ $.ExampleEntity.NameAsEntity }}(my_entity_id); + my_entity.count = 1; {{ else }} //No ID field was found for {{ $.ExampleEntity.Name }}, we are using index as ID - const myEntity = new {{ $.ExampleEntity.NameAsEntity }}(i); + const my_entity = new {{ $.ExampleEntity.NameAsEntity }}(i); + my_entity.count = 1; {{- end }} - myEntity.save(); + my_entity.save(); } {{- else}} if (MyEntity.load("1") == null) { - const myEntity = new MyEntity(); - myEntity.count = 1; - myEntity.save(); + const my_entity = new MyEntity(); + my_entity.count = 1; + my_entity.save(); } else { - let myEntity := MyEntity.load("1") - myEntity.count += 1 - myEntity.save() + let my_entity := MyEntity.load("1") + my_entity.count += 1 + my_entity.save() } {{- end }} } diff --git a/codegen/templates/triggers/subgraph.yaml.gotmpl b/codegen/templates/triggers/subgraph.yaml.gotmpl index 781f3d30..815af4e1 100644 --- a/codegen/templates/triggers/subgraph.yaml.gotmpl +++ b/codegen/templates/triggers/subgraph.yaml.gotmpl @@ -1,12 +1,12 @@ specVersion: 1.0.0 -description: {{ .Name }} Substreams-powered-Subgraph +description: {{toKebabCase .Name }} Substreams-powered-Subgraph indexerHints: prune: auto schema: file: ./schema.graphql dataSources: - kind: substreams - name: test + name: {{ .Name }} network: {{ .Network }} source: package: diff --git a/manifest/manifest.go b/manifest/manifest.go index c4da1e2b..8baff213 100644 --- a/manifest/manifest.go +++ b/manifest/manifest.go @@ -32,14 +32,13 @@ const ( // Manifest is a YAML structure used to 
create a Package and its list // of Modules. The notion of a manifest does not live in protobuf definitions. type Manifest struct { - SpecVersion string `yaml:"specVersion,omitempty"` // check that it equals v0.1.0 - Package PackageMeta `yaml:"package,omitempty"` - Protobuf Protobuf `yaml:"protobuf,omitempty"` - Imports mapSlice `yaml:"imports,omitempty"` - Binaries map[string]Binary `yaml:"binaries,omitempty"` - Modules []*Module `yaml:"modules,omitempty"` - Params map[string]string `yaml:"params,omitempty"` - + SpecVersion string `yaml:"specVersion,omitempty"` // check that it equals v0.1.0 + Package PackageMeta `yaml:"package,omitempty"` + Protobuf Protobuf `yaml:"protobuf,omitempty"` + Imports mapSlice `yaml:"imports,omitempty"` + Binaries map[string]Binary `yaml:"binaries,omitempty"` + Modules []*Module `yaml:"modules,omitempty"` + Params map[string]string `yaml:"params,omitempty"` BlockFilters map[string]string `yaml:"blockFilters,omitempty"` Network string `yaml:"network,omitempty"` Networks map[string]*NetworkParams `yaml:"networks,omitempty"` From cacf123e086ccc06c18150156c404cfd69f4b9ad Mon Sep 17 00:00:00 2001 From: Alexandre Bourget Date: Fri, 26 Jul 2024 12:23:43 -0400 Subject: [PATCH 5/6] It's not a dev preview anymore :) --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index c5e76db8..e88660d9 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,6 @@ # Substreams -> Developer preview - Substreams is a powerful blockchain indexing technology, developed for The Graph Network. Substreams enables developers to write Rust modules, composing data streams alongside the community, and provides extremely high performance indexing by virtue of parallelization, in a streaming-first fashion. 
From b7511d0a3e382074f60f7f54b79f3813d786a83b Mon Sep 17 00:00:00 2001 From: arnaudberger Date: Fri, 26 Jul 2024 12:31:57 -0400 Subject: [PATCH 6/6] add request module form and .devContainer handling --- codegen/helpers.go | 68 +++++++++++++++++++++++++++++++++++++-------- codegen/sql.go | 3 +- codegen/subgraph.go | 14 +++------- 3 files changed, 62 insertions(+), 23 deletions(-) diff --git a/codegen/helpers.go b/codegen/helpers.go index f76d1f3e..30c469ae 100644 --- a/codegen/helpers.go +++ b/codegen/helpers.go @@ -81,7 +81,11 @@ func processMessage(message *descriptorpb.DescriptorProto, parentName string, pr } } -func buildGenerateCommandFromArgs(manifestPath, moduleName, outputType string, withDevEnv bool) error { +func buildGenerateCommandFromArgs(manifestPath, outputType string, withDevEnv bool) error { + _, err := os.Stat(".devcontainer") + + isInDevContainer := !os.IsNotExist(err) + reader, err := manifest.NewReader(manifestPath) if err != nil { return fmt.Errorf("manifest reader: %w", err) @@ -92,7 +96,19 @@ func buildGenerateCommandFromArgs(manifestPath, moduleName, outputType string, w return fmt.Errorf("read manifest %q: %w", manifestPath, err) } - requestedModule, err := getModule(pkg, moduleName) + moduleNames := []string{} + for _, module := range pkg.Modules.Modules { + if module.Output != nil { + moduleNames = append(moduleNames, module.Name) + } + } + + selectedModule, err := createRequestModuleForm(moduleNames) + if err != nil { + return fmt.Errorf("creating request module form: %w", err) + } + + requestedModule, err := getModule(pkg, selectedModule) if err != nil { return fmt.Errorf("getting module: %w", err) } @@ -130,9 +146,16 @@ func buildGenerateCommandFromArgs(manifestPath, moduleName, outputType string, w return fmt.Errorf("rendering project files: %w", err) } - saveDir, err := createSaveDirForm() - if err != nil { - fmt.Println("creating save directory: %w", err) + saveDir := "subgraph" + if cwd, err := os.Getwd(); err == nil { + saveDir = 
filepath.Join(cwd, saveDir) + } + + if !isInDevContainer { + saveDir, err = createSaveDirForm(saveDir) + if err != nil { + fmt.Println("creating save directory: %w", err) + } } err = saveProjectFiles(projectFiles, saveDir) @@ -143,12 +166,7 @@ func buildGenerateCommandFromArgs(manifestPath, moduleName, outputType string, w return nil } -func createSaveDirForm() (string, error) { - saveDir := "subgraph" - if cwd, err := os.Getwd(); err == nil { - saveDir = filepath.Join(cwd, saveDir) - } - +func createSaveDirForm(saveDir string) (string, error) { inputField := huh.NewInput().Title("In which directory do you want to generate the project?").Value(&saveDir) var WITH_ACCESSIBLE = false @@ -182,3 +200,31 @@ func saveProjectFiles(projectFiles map[string][]byte, saveDir string) error { return nil } + +func createRequestModuleForm(labels []string) (string, error) { + if len(labels) == 0 { + fmt.Println("Hmm, the server sent no option to select from (!)") + } + + var options []huh.Option[string] + optionsMap := make(map[string]string) + for i := 0; i < len(labels); i++ { + entry := huh.Option[string]{ + Key: labels[i], + Value: labels[i], + } + options = append(options, entry) + optionsMap[entry.Value] = entry.Key + } + var selection string + selectField := huh.NewSelect[string](). + Options(options...). 
+ Value(&selection) + + err := huh.NewForm(huh.NewGroup(selectField)).WithTheme(huh.ThemeCharm()).Run() + if err != nil { + return "", fmt.Errorf("failed taking input: %w", err) + } + + return selection, nil +} diff --git a/codegen/sql.go b/codegen/sql.go index c2355c81..b4370a05 100644 --- a/codegen/sql.go +++ b/codegen/sql.go @@ -16,10 +16,9 @@ var SQLCmd = &cobra.Command{ func generateSQLEnv(cmd *cobra.Command, args []string) error { manifestPath := args[0] - moduleName := args[1] withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") - err := buildGenerateCommandFromArgs(manifestPath, moduleName, outputTypeSubgraph, withDevEnv) + err := buildGenerateCommandFromArgs(manifestPath, outputTypeSubgraph, withDevEnv) if err != nil { return fmt.Errorf("building generate command: %w", err) } diff --git a/codegen/subgraph.go b/codegen/subgraph.go index 8cb19acf..472cc07c 100644 --- a/codegen/subgraph.go +++ b/codegen/subgraph.go @@ -8,27 +8,21 @@ import ( ) var SubgraphCmd = &cobra.Command{ - Use: "subgraph [] ", + Use: "subgraph []", Short: "Generate subgraph dev environment from substreams manifest", - Args: cobra.RangeArgs(1, 2), + Args: cobra.RangeArgs(0, 1), RunE: generateSubgraphEnv, } func generateSubgraphEnv(cmd *cobra.Command, args []string) error { manifestPath := "" - moduleName := "" - if len(args) == 2 { - manifestPath = args[0] - moduleName = args[1] - } - if len(args) == 1 { - moduleName = args[0] + manifestPath = args[0] } withDevEnv := sflags.MustGetBool(cmd, "with-dev-env") - err := buildGenerateCommandFromArgs(manifestPath, moduleName, outputTypeSubgraph, withDevEnv) + err := buildGenerateCommandFromArgs(manifestPath, outputTypeSubgraph, withDevEnv) if err != nil { return fmt.Errorf("building generate command: %w", err) }