From d4c899c8c4d731effbf1495b036cf0e6b406f0eb Mon Sep 17 00:00:00 2001
From: ValdemarGr
Date: Thu, 21 Mar 2024 14:22:26 +0000
Subject: [PATCH] Deploy website - based on 06471a0f39a9b881398490bb4bf7c287bb2dc39e

---
 404.html                                      |  4 ++--
 ...b0880.1f85cf71.js => 018b0880.ef45eea6.js} |  2 +-
 ...4bcdb.8d243e93.js => 0a44bcdb.60a58c3b.js} |  2 +-
 assets/js/4f169309.9c6adec9.js                |  1 +
 assets/js/4f169309.ad8bcbf1.js                |  1 -
 ...f8b26.18142116.js => 62af8b26.1bedbcd2.js} |  2 +-
 ...daa8b.a97d5fbe.js => 677daa8b.0ae8387b.js} |  2 +-
 ...8ea58.e8321fab.js => 8588ea58.3b3182e4.js} |  2 +-
 ...ae478.9c63c4e7.js => 92cae478.5c0b8d4e.js} |  2 +-
 ...10064.fb80a129.js => ceb10064.65987bd1.js} |  2 +-
 ...79f40.9c6cb8e8.js => ffc79f40.fc52bab8.js} |  2 +-
 assets/js/runtime~main.6261d4fd.js            |  1 +
 assets/js/runtime~main.c1d8c9db.js            |  1 -
 docs/client/code-generation/index.html        |  6 ++---
 docs/client/dsl/index.html                    |  8 +++----
 docs/client/integrations/http4s/index.html    |  4 ++--
 docs/overview/index.html                      |  4 ++--
 docs/overview/modules/index.html              |  4 ++--
 docs/server/execution/planning/index.html     |  6 ++---
 docs/server/execution/statistics/index.html   |  4 ++--
 docs/server/integrations/goi/index.html       |  4 ++--
 docs/server/integrations/graphqlws/index.html |  4 ++--
 docs/server/integrations/http4s/index.html    |  4 ++--
 docs/server/integrations/natchez/index.html   |  4 ++--
 .../server/integrations/relational/index.html |  6 ++---
 docs/server/schema/arrow_dsl/index.html       |  8 +++----
 docs/server/schema/compiler/index.html        |  4 ++--
 docs/server/schema/context/index.html         |  4 ++--
 docs/server/schema/dsl/index.html             |  6 ++---
 docs/server/schema/error_handling/index.html  |  6 ++---
 docs/server/schema/extending/index.html       |  4 ++--
 docs/server/schema/index.html                 |  4 ++--
 docs/server/schema/input_types/index.html     |  4 ++--
 docs/server/schema/output_types/index.html    |  4 ++--
 docs/server/schema/resolvers/index.html       | 22 +++++++++----------
 .../server/schema/structuring_apps/index.html |  6 ++---
 docs/tutorial/index.html                      |  4 ++--
 index.html                                    |  4 ++--
 markdown-page/index.html                      |  4 ++--
 39 files changed, 83 insertions(+), 83 deletions(-)
 rename assets/js/{018b0880.1f85cf71.js => 018b0880.ef45eea6.js} (99%)
 rename assets/js/{0a44bcdb.8d243e93.js => 0a44bcdb.60a58c3b.js} (99%)
 create mode 100644 assets/js/4f169309.9c6adec9.js
 delete mode 100644 assets/js/4f169309.ad8bcbf1.js
 rename assets/js/{62af8b26.18142116.js => 62af8b26.1bedbcd2.js} (85%)
 rename assets/js/{677daa8b.a97d5fbe.js => 677daa8b.0ae8387b.js} (98%)
 rename assets/js/{8588ea58.e8321fab.js => 8588ea58.3b3182e4.js} (96%)
 rename assets/js/{92cae478.9c63c4e7.js => 92cae478.5c0b8d4e.js} (99%)
 rename assets/js/{ceb10064.fb80a129.js => ceb10064.65987bd1.js} (95%)
 rename assets/js/{ffc79f40.9c6cb8e8.js => ffc79f40.fc52bab8.js} (97%)
 create mode 100644 assets/js/runtime~main.6261d4fd.js
 delete mode 100644 assets/js/runtime~main.c1d8c9db.js

diff --git a/404.html b/404.html
index bd18b185..7ea93efd 100644
--- a/404.html
+++ b/404.html
@@ -4,13 +4,13 @@
Page Not Found | gql - +
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

- + \ No newline at end of file diff --git a/assets/js/018b0880.1f85cf71.js b/assets/js/018b0880.ef45eea6.js similarity index 99% rename from assets/js/018b0880.1f85cf71.js rename to assets/js/018b0880.ef45eea6.js index cdf5cedb..6e66ce39 100644 --- a/assets/js/018b0880.1f85cf71.js +++ b/assets/js/018b0880.ef45eea6.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[227],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),o=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},c=function(e){var n=o(e.components);return a.createElement(l.Provider,{value:n},e.children)},m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},y=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,s=e.originalType,l=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),y=o(t),u=r,d=y["".concat(l,".").concat(u)]||y[u]||m[u]||s;return t?a.createElement(d,i(i({ref:n},c),{},{components:t})):a.createElement(d,i({ref:n},c))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var s=t.length,i=new Array(s);i[0]=y;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p.mdxType="string"==typeof e?e:r,i[1]=p;for(var o=2;o{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>m,frontMatter:()=>s,metadata:()=>p,toc:()=>o});var a=t(7462),r=(t(7294),t(3905));const s={title:"Structuring large applications"},i=void 0,p={unversionedId:"server/schema/structuring_apps",id:"server/schema/structuring_apps",title:"Structuring large applications",description:"The documentation explores smaller examples.",source:"@site/docs/server/schema/structuring_apps.md",sourceDirName:"server/schema",slug:"/server/schema/structuring_apps",permalink:"/gql/docs/server/schema/structuring_apps",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/structuring_apps.md",tags:[],version:"current",frontMatter:{title:"Structuring large applications"},sidebar:"docs",previous:{title:"Extending schemas",permalink:"/gql/docs/server/schema/extending"},next:{title:"Planning",permalink:"/gql/docs/server/execution/planning"}},l={},o=[{value:"Seperating domains",id:"seperating-domains",level:2},{value:"Mutually recursive domains",id:"mutually-recursive-domains",level:2},{value:"Call by name constructor parameters",id:"call-by-name-constructor-parameters",level:3},{value:"Cake",id:"cake",level:3}],c={toc:o};function m(e){let{components:n,...t}=e;return(0,r.kt)("wrapper",(0,a.Z)({},c,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"The documentation explores smaller examples.\nTo host larger graphs there are some considerations that must be addressed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"What up-front work can be done to minimize the overhead in introducing new types."),(0,r.kt)("li",{parentName:"ul"},"How is (mutual) recursion handled between 
different domains.")),(0,r.kt)("p",null,"Recursive datatypes are notoriously difficult to deal with.\nIn functional programming lazyness is often exploited as a solution to introduce cyclic data, but can easily accidentally introduce infinite recursion."),(0,r.kt)("h2",{id:"seperating-domains"},"Seperating domains"),(0,r.kt)("p",null,"Partially applying all needed dependencies can be expressed with a class."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.effect._\nimport gql._\nimport gql.ast._\nimport gql.dsl._\n\nfinal case class Organization(\n id: String,\n name: String\n)\n\nfinal case class User(\n id: String,\n name: String,\n organizationId: String\n)\n\ntrait Repo {\n def getUser(id: String): IO[User]\n def getOrganization(id: String): IO[Organization]\n def getOrganizationUsers(organizationId: String): IO[List[User]]\n}\n\nclass UserTypes(repo: Repo) {\n // notice how we bind the effect (IO) so that we can omit this parameter in the dsl\n val dsl = new GqlDsl[IO] {}\n import dsl._\n\n implicit val organization: Type[IO, Organization] = \n tpe[Organization](\n "Organization",\n "id" -> lift(_.id),\n "name" -> lift(_.name),\n "users" -> eff(x => repo.getOrganizationUsers(x.id))\n )\n\n implicit val user: Type[IO, User] =\n tpe[User](\n "User",\n "id" -> lift(_.id),\n "name" -> lift(_.name),\n "organization" -> eff(x => repo.getOrganization(x.organizationId))\n )\n}\n')),(0,r.kt)("details",null,(0,r.kt)("summary",null,"You can also extend the dsl if you prefer a more object oriented style."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"class UserTypes(repo: Repo) extends GqlDsl[IO] {\n // ...\n}\n"))),(0,r.kt)("h2",{id:"mutually-recursive-domains"},"Mutually recursive domains"),(0,r.kt)("p",null,"Subgraphs can neatly packaged into classes, but that does not address the issue of recursion between different domains."),(0,r.kt)("h3",{id:"call-by-name-constructor-parameters"},"Call by name constructor parameters"),(0,r.kt)("p",null,"A compositional approach is to use call by name constructor parameters to lazily pass mutually recursive dependencies."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"class UserTypes(paymentTypes: => PaymentTypes) {\n lazy val p = paymentTypes\n import p._\n // ...\n}\n\nclass PaymentTypes(userTypes: => UserTypes) {\n lazy val u = userTypes\n import u._\n // ...\n}\n\nlazy val userTypes: UserTypes = new UserTypes(paymentTypes)\nlazy val paymentTypes: PaymentTypes = new PaymentTypes(userTypes)\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"When domain types are defined in seperate projects, OOP interfaces can be used to implement mutual recursion."),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"// core project\ntrait User\ntrait UserTypes {\n // we can also choose to only expose the datatypes that are necessary\n implicit def userType: Type[IO, User]\n}\ntrait Payment\ntrait PaymentTypes {\n implicit def paymentType: Type[IO, Payment]\n}\n\n// user project\nclass UserTypesImpl(paymentTypes: => PaymentTypes) extends UserTypes {\n lazy val p = paymentTypes\n import p._\n def userType: Type[IO, User] = ???\n}\n\n// payment project\nclass PaymentTypesImpl(userTypes: => UserTypes) extends PaymentTypes {\n lazy val u = userTypes\n import u._\n def paymentType: Type[IO, Payment] = ???\n}\n\n// main project\nlazy val userTypes: UserTypes = new 
UserTypesImpl(paymentTypes)\nlazy val paymentTypes: PaymentTypes = new PaymentTypesImpl(userTypes)\n"))),(0,r.kt)("h3",{id:"cake"},"Cake"),(0,r.kt)("p",null,"The cake pattern can also be used to define mutually recursive dependencies, at the cost of composability."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"// core project\ntrait User\ntrait UserTypes {\n // we can also choose to only expose the datatypes that are necessary\n implicit def userType: Type[IO, User]\n}\ntrait Payment\ntrait PaymentTypes {\n implicit def paymentType: Type[IO, Payment]\n}\n\n// user project\ntrait UserTypesImpl extends UserTypes { self: PaymentTypes =>\n import self._\n def userType: Type[IO, User] = ???\n}\n\n// payment project\ntrait PaymentTypesImpl extends PaymentTypes { self: UserTypes =>\n import self._\n def paymentType: Type[IO, Payment] = ???\n}\n\n// main project\nval allTypes = new UserTypesImpl with PaymentTypesImpl { }\n// allTypes: AnyRef with UserTypesImpl with PaymentTypesImpl = repl.MdocSession$MdocApp$$anon$2@194d88ef\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[227],{3905:(e,n,t)=>{t.d(n,{Zo:()=>c,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function s(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var l=a.createContext({}),o=function(e){var n=a.useContext(l),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},c=function(e){var n=o(e.components);return a.createElement(l.Provider,{value:n},e.children)},m={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},y=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,s=e.originalType,l=e.parentName,c=p(e,["components","mdxType","originalType","parentName"]),y=o(t),u=r,d=y["".concat(l,".").concat(u)]||y[u]||m[u]||s;return t?a.createElement(d,i(i({ref:n},c),{},{components:t})):a.createElement(d,i({ref:n},c))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var s=t.length,i=new Array(s);i[0]=y;var p={};for(var l in n)hasOwnProperty.call(n,l)&&(p[l]=n[l]);p.originalType=e,p.mdxType="string"==typeof e?e:r,i[1]=p;for(var o=2;o{t.r(n),t.d(n,{assets:()=>l,contentTitle:()=>i,default:()=>m,frontMatter:()=>s,metadata:()=>p,toc:()=>o});var a=t(7462),r=(t(7294),t(3905));const s={title:"Structuring large applications"},i=void 0,p={unversionedId:"server/schema/structuring_apps",id:"server/schema/structuring_apps",title:"Structuring large applications",description:"The documentation explores smaller examples.",source:"@site/docs/server/schema/structuring_apps.md",sourceDirName:"server/schema",slug:"/server/schema/structuring_apps",permalink:"/gql/docs/server/schema/structuring_apps",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/structuring_apps.md",tags:[],version:"current",frontMatter:{title:"Structuring large applications"},sidebar:"docs",previous:{title:"Extending 
schemas",permalink:"/gql/docs/server/schema/extending"},next:{title:"Planning",permalink:"/gql/docs/server/execution/planning"}},l={},o=[{value:"Seperating domains",id:"seperating-domains",level:2},{value:"Mutually recursive domains",id:"mutually-recursive-domains",level:2},{value:"Call by name constructor parameters",id:"call-by-name-constructor-parameters",level:3},{value:"Cake",id:"cake",level:3}],c={toc:o};function m(e){let{components:n,...t}=e;return(0,r.kt)("wrapper",(0,a.Z)({},c,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"The documentation explores smaller examples.\nTo host larger graphs there are some considerations that must be addressed."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"What up-front work can be done to minimize the overhead in introducing new types."),(0,r.kt)("li",{parentName:"ul"},"How is (mutual) recursion handled between different domains.")),(0,r.kt)("p",null,"Recursive datatypes are notoriously difficult to deal with.\nIn functional programming lazyness is often exploited as a solution to introduce cyclic data, but can easily accidentally introduce infinite recursion."),(0,r.kt)("h2",{id:"seperating-domains"},"Seperating domains"),(0,r.kt)("p",null,"Partially applying all needed dependencies can be expressed with a class."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.effect._\nimport gql._\nimport gql.ast._\nimport gql.dsl._\n\nfinal case class Organization(\n id: String,\n name: String\n)\n\nfinal case class User(\n id: String,\n name: String,\n organizationId: String\n)\n\ntrait Repo {\n def getUser(id: String): IO[User]\n def getOrganization(id: String): IO[Organization]\n def getOrganizationUsers(organizationId: String): IO[List[User]]\n}\n\nclass UserTypes(repo: Repo) {\n // notice how we bind the effect (IO) so that we can omit this parameter in the dsl\n val dsl = new GqlDsl[IO] {}\n import dsl._\n\n implicit val organization: Type[IO, Organization] = \n tpe[Organization](\n "Organization",\n "id" -> lift(_.id),\n "name" -> lift(_.name),\n "users" -> eff(x => repo.getOrganizationUsers(x.id))\n )\n\n implicit val user: Type[IO, User] =\n tpe[User](\n "User",\n "id" -> lift(_.id),\n "name" -> lift(_.name),\n "organization" -> eff(x => repo.getOrganization(x.organizationId))\n )\n}\n')),(0,r.kt)("details",null,(0,r.kt)("summary",null,"You can also extend the dsl if you prefer a more object oriented style."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"class UserTypes(repo: Repo) extends GqlDsl[IO] {\n // ...\n}\n"))),(0,r.kt)("h2",{id:"mutually-recursive-domains"},"Mutually recursive domains"),(0,r.kt)("p",null,"Subgraphs can neatly packaged into classes, but that does not address the issue of recursion between different domains."),(0,r.kt)("h3",{id:"call-by-name-constructor-parameters"},"Call by name constructor parameters"),(0,r.kt)("p",null,"A compositional approach is to use call by name constructor parameters to lazily pass mutually recursive dependencies."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"class UserTypes(paymentTypes: => PaymentTypes) {\n lazy val p = paymentTypes\n import p._\n // ...\n}\n\nclass PaymentTypes(userTypes: => UserTypes) {\n lazy val u = userTypes\n import u._\n // ...\n}\n\nlazy val userTypes: UserTypes = new UserTypes(paymentTypes)\nlazy val paymentTypes: PaymentTypes = new 
PaymentTypes(userTypes)\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"When domain types are defined in seperate projects, OOP interfaces can be used to implement mutual recursion."),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"// core project\ntrait User\ntrait UserTypes {\n // we can also choose to only expose the datatypes that are necessary\n implicit def userType: Type[IO, User]\n}\ntrait Payment\ntrait PaymentTypes {\n implicit def paymentType: Type[IO, Payment]\n}\n\n// user project\nclass UserTypesImpl(paymentTypes: => PaymentTypes) extends UserTypes {\n lazy val p = paymentTypes\n import p._\n def userType: Type[IO, User] = ???\n}\n\n// payment project\nclass PaymentTypesImpl(userTypes: => UserTypes) extends PaymentTypes {\n lazy val u = userTypes\n import u._\n def paymentType: Type[IO, Payment] = ???\n}\n\n// main project\nlazy val userTypes: UserTypes = new UserTypesImpl(paymentTypes)\nlazy val paymentTypes: PaymentTypes = new PaymentTypesImpl(userTypes)\n"))),(0,r.kt)("h3",{id:"cake"},"Cake"),(0,r.kt)("p",null,"The cake pattern can also be used to define mutually recursive dependencies, at the cost of composability."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"// core project\ntrait User\ntrait UserTypes {\n // we can also choose to only expose the datatypes that are necessary\n implicit def userType: Type[IO, User]\n}\ntrait Payment\ntrait PaymentTypes {\n implicit def paymentType: Type[IO, Payment]\n}\n\n// user project\ntrait UserTypesImpl extends UserTypes { self: PaymentTypes =>\n import self._\n def userType: Type[IO, User] = ???\n}\n\n// payment project\ntrait PaymentTypesImpl extends PaymentTypes { self: UserTypes =>\n import self._\n def paymentType: Type[IO, Payment] = ???\n}\n\n// main project\nval allTypes = new UserTypesImpl with PaymentTypesImpl { }\n// allTypes: AnyRef with UserTypesImpl with PaymentTypesImpl = repl.MdocSession$MdocApp$$anon$2@3f7e25e7\n")))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/0a44bcdb.8d243e93.js b/assets/js/0a44bcdb.60a58c3b.js similarity index 99% rename from assets/js/0a44bcdb.8d243e93.js rename to assets/js/0a44bcdb.60a58c3b.js index 3fe2fbdc..e054e2fe 100644 --- a/assets/js/0a44bcdb.8d243e93.js +++ b/assets/js/0a44bcdb.60a58c3b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[436],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),p=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=p(e.components);return a.createElement(s.Provider,{value:t},e.children)},m={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var 
n=e.components,i=e.mdxType,r=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=p(n),d=i,f=u["".concat(s,".").concat(d)]||u[d]||m[d]||r;return n?a.createElement(f,l(l({ref:t},c),{},{components:n})):a.createElement(f,l({ref:t},c))}));function d(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,l=new Array(r);l[0]=u;var o={};for(var s in t)hasOwnProperty.call(t,s)&&(o[s]=t[s]);o.originalType=e,o.mdxType="string"==typeof e?e:i,l[1]=o;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>m,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=n(7462),i=(n(7294),n(3905));const r={title:"The DSL"},l=void 0,o={unversionedId:"server/schema/dsl",id:"server/schema/dsl",title:"The DSL",description:"gql's dsl is a lightweight set of smart-constructors.",source:"@site/docs/server/schema/dsl.md",sourceDirName:"server/schema",slug:"/server/schema/dsl",permalink:"/gql/docs/server/schema/dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/dsl.md",tags:[],version:"current",frontMatter:{title:"The DSL"},sidebar:"docs",previous:{title:"Input types",permalink:"/gql/docs/server/schema/input_types"},next:{title:"Monadic Resolver DSL",permalink:"/gql/docs/server/schema/arrow_dsl"}},s={},p=[{value:"Fields",id:"fields",level:2},{value:"Builders",id:"builders",level:3},{value:"Value resolution",id:"value-resolution",level:3},{value:"Unification instances",id:"unification-instances",level:2},{value:"Interface inheritance",id:"interface-inheritance",level:3},{value:"Input types",id:"input-types",level:2},{value:"Other output structures",id:"other-output-structures",level:2},{value:"Covariant effects",id:"covariant-effects",level:3}],c={toc:p};function m(e){let{components:t,...n}=e;return(0,i.kt)("wrapper",(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"gql's dsl is a lightweight set of smart-constructors.\nIf you have a particular usecase or even coding style that conflicts with the dsl, you can always introduce your own schema definition syntax or build on top of the existing dsl."),(0,i.kt)("p",null,"Lets begin by importing what we need."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"import cats.data._\nimport cats.effect._\nimport cats.implicits._\nimport gql.dsl.all._\nimport gql.ast._\nimport gql.resolver._\n")),(0,i.kt)("h2",{id:"fields"},"Fields"),(0,i.kt)("p",null,"The simplest form of field construction comes from the ",(0,i.kt)("inlineCode",{parentName:"p"},"build.from")," smart constructor.\nIt simply lifts a resolver into a field, given that a gql output type exists for the resolver output."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"def r: Resolver[IO, Int, String] = Resolver.lift(i => i.toString())\n\nval f: Field[IO, Int, String] = build.from(r)\n// f: Field[IO, Int, String] = Field(\n// resolve = gql.resolver.Resolver@539567dd,\n// output = cats.Always@141211d5,\n// description = None,\n// attributes = List()\n// )\n")),(0,i.kt)("p",null,"Sometimes type inference cannot find the proper type for a field:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build.from(Resolver.liftF(i => IO(i.toString())))\n// error: value liftF is not a member of object gql.resolver.Resolver\n// did you mean lift? 
or perhaps liftFull?\n// build.from(Resolver.liftF(i => IO(i.toString())))\n// ^^^^^^^^^^^^^^\n")),(0,i.kt)("p",null,"The type parameters for ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," are partially applied, such that when type inference isn't enough, types can be supplied explicitly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build[IO, Int].from(Resolver.effect(i => IO(i.toString())))\n\nbuild.from(Resolver.effect((i: Int) => IO(i.toString())))\n")),(0,i.kt)("p",null,"For some fields, there is an even more concise syntax.\nInvoking the ",(0,i.kt)("inlineCode",{parentName:"p"},"apply")," method of ",(0,i.kt)("inlineCode",{parentName:"p"},"build"),", takes a higher order function that goes from the identity resolver (",(0,i.kt)("inlineCode",{parentName:"p"},"Resolver[F, A, A]"),") to some output."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build[IO, Int](_.map(i => i * 2).evalMap(i => IO(i))): Field[IO, Int, Int]\n")),(0,i.kt)("h3",{id:"builders"},"Builders"),(0,i.kt)("p",null,"Complex structures may require many special resolver compositions.\nThe dsl also introduces a something akin to a builder pattern.\nThe ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," function from the previous section, creates a builder that has more constructors than just ",(0,i.kt)("inlineCode",{parentName:"p"},"from")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"apply"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.dsl.FieldBuilder\nval b: FieldBuilder[IO, Int] = build[IO, Int]\n")),(0,i.kt)("p",null,"Often a builder is only relevant within a scope, thus one can end up having many unused builders in scope.\nThe ",(0,i.kt)("inlineCode",{parentName:"p"},"builder")," makes such code more concise:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"builder[IO, Int]{ (fb: FieldBuilder[IO, Int]) =>\n fb\n}\n")),(0,i.kt)("p",null,"The builder dsl contains most of the field related constructors:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'builder[IO, Int]{ fb =>\n fb.tpe(\n "Query",\n "answer" -> lift(i => i * 0 + 42),\n "pong" -> fb(_.map(_ => "pong"))\n ): Type[IO, Int]\n \n fb.fields(\n "answer" -> fb.lift(i => i * 0 + 42),\n "ping" -> fb.from(Resolver.lift(_ => "pong"))\n )\n}\n')),(0,i.kt)("h3",{id:"value-resolution"},"Value resolution"),(0,i.kt)("p",null,"Wrapping every field in a ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," smart constructor and then defining the resolver seperately is a bit verbose.\nThere are smart constructors for two common variants of field resolvers, that lift a resolver function directly to a ",(0,i.kt)("inlineCode",{parentName:"p"},"Field"),"."),(0,i.kt)("p",null,"We must decide if the field is pure or effectful:"),(0,i.kt)("admonition",{type:"note"},(0,i.kt)("p",{parentName:"admonition"},"The effect constructor is named ",(0,i.kt)("inlineCode",{parentName:"p"},"eff")," to avoid collisions with cats-effect.")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'final case class Person(\n name: String\n)\n\ntpe[IO, Person](\n "Person",\n "name" -> lift(_.name),\n "nameEffect" -> eff(x => IO(x.name))\n)\n')),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"lift")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"eff")," constructors can also also be supplied with 
arguments:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'def familyName = arg[String]("familyName")\n\ntpe[IO, Person](\n "Person",\n "name" -> lift(familyName)(_ + _.name),\n "nameEffect" -> eff(familyName)((f, p) => IO(p.name + f))\n)\n')),(0,i.kt)("h2",{id:"unification-instances"},"Unification instances"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Union"),"s and ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface"),"s are abstract types that have implementations."),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Union")," declares it's implementations up-front, like a ",(0,i.kt)("inlineCode",{parentName:"p"},"sealed trait"),".\nHowever, ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface")," implementations are declared on the types that implement the interface, like a ",(0,i.kt)("inlineCode",{parentName:"p"},"trait")," or an ",(0,i.kt)("inlineCode",{parentName:"p"},"abstract class"),"."),(0,i.kt)("p",null,"Before continuing, lets setup the environment."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"trait Vehicle { \n def name: String\n}\nfinal case class Car(name: String) extends Vehicle\nfinal case class Boat(name: String) extends Vehicle\nfinal case class Truck(name: String) extends Vehicle\n\n")),(0,i.kt)("p",null,"For the ",(0,i.kt)("inlineCode",{parentName:"p"},"Union"),", variants can be declared using the ",(0,i.kt)("inlineCode",{parentName:"p"},"variant")," function, which takes a ",(0,i.kt)("inlineCode",{parentName:"p"},"PartialFunction")," from the unifying type to the implementation."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit def car: Type[IO, Car] = ???\nimplicit def boat: Type[IO, Boat] = ???\nimplicit def truck: Type[IO, Truck] = ???\n\nunion[IO, Vehicle]("Vehicle")\n .variant[Car] { case c: Car => c }\n .variant[Boat] { case b: Boat => b }\n .variant[Truck] { case t: Truck => t }\n')),(0,i.kt)("p",null,"A shorthand function exists, if the type of the variant is a subtype of the unifying type."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'union[IO, Vehicle]("Vehicle")\n .subtype[Car] \n .subtype[Boat] \n .subtype[Truck] \n')),(0,i.kt)("p",null,"For an ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface")," the same dsl exists, but is placed on the types that can implement the interface (a ",(0,i.kt)("inlineCode",{parentName:"p"},"Type")," or another ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface"),")."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val vehicle: Interface[IO, Vehicle] = interface[IO, Vehicle](\n "Vehicle",\n "name" -> abst[IO, String]\n)\n\ntpe[IO, Car]("Car", "name" -> lift(_.name))\n .implements[Vehicle]{ case c: Car => c }\n \ntpe[IO, Boat]("Boat", "name" -> lift(_.name))\n .subtypeOf[Vehicle]\n \ntrait OtherVehicle extends Vehicle {\n def weight: Int\n}\n\ninterface[IO, OtherVehicle](\n "OtherVehicle",\n "weight" -> abst[IO, Int],\n // Since OtherVehicle is a subtype of Vehicle\n // we can directly embed the Vehicle fields\n vehicle.abstractFields: _*\n).implements[Vehicle]\n')),(0,i.kt)("h3",{id:"interface-inheritance"},"Interface inheritance"),(0,i.kt)("p",null,"It can be a bit cumbersome to implement an interface's fields every time it is extended.\nInterfaces accept any field type (abstract or concrete) as input.\nThis is convinient since it allows a safe type of inheritance.\nWhen using the 
",(0,i.kt)("inlineCode",{parentName:"p"},"subtypeImpl")," function, all possible fields are added to the type."),(0,i.kt)("admonition",{type:"info"},(0,i.kt)("p",{parentName:"admonition"},"gql's inheritance has some implications:"),(0,i.kt)("ul",{parentName:"admonition"},(0,i.kt)("li",{parentName:"ul"},"If you're working an a ",(0,i.kt)("inlineCode",{parentName:"li"},"Type"),", only concrete fields can be inherited."),(0,i.kt)("li",{parentName:"ul"},"If you're working on an ",(0,i.kt)("inlineCode",{parentName:"li"},"Interface"),", all fields, concrete and abstract can be inherited.")),(0,i.kt)("p",{parentName:"admonition"},"gql picks the best field when you inherit from an interface.\nFor two fields with the same name, gql will always pick the concrete field.\nIf both are concrete, it will prioritize the field from the subtype (the type you're working on).")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'trait Pet {\n def name: String\n def age: Int\n def weight: Double\n}\n\ncase class Dog(name: String, age: Int, weight: Double) extends Pet\n\nimplicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](\n "Pet",\n "name" -> lift(_.name),\n "age" -> lift(_.age),\n "weight" -> lift(_.weight)\n)\n\nlazy val overwirttenName = lift[Dog](_.name)\n\nimplicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](\n "Dog",\n "bark" -> lift(_ => "woof!"),\n "name" -> overwirttenName\n).subtypeImpl[Pet]\n\ndog.fields.map{ case (k, _) => k}.mkString_(", ")\n// res13: String = "bark, name, age, weight"\n\n// The Dog type has it\'s own implementation of the name field\ndog.fields.exists{ case (_, v) => v == overwirttenName }\n// res14: Boolean = true\n')),(0,i.kt)("p",null,"To showcase the inheritance a bit further, consider the following invalid schema."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](\n "Pet",\n "name" -> lift(_.name),\n "age" -> lift(_.age),\n // Notice that weight is abstract\n "weight" -> abst[IO, Double]\n)\n\nimplicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](\n "Dog",\n "bark" -> lift(_ => "woof!")\n).subtypeImpl[Pet]\n\n// We are missing the weight field\ndog.fields.map{ case (k, _) => k}.mkString_(", ")\n// res15: String = "bark, name, age"\n')),(0,i.kt)("admonition",{type:"tip"},(0,i.kt)("p",{parentName:"admonition"},(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/#validation"},"Schema validation")," will catch such errors.")),(0,i.kt)("h2",{id:"input-types"},"Input types"),(0,i.kt)("p",null,"Review the ",(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/input_types"},"Input types")," section for more information."),(0,i.kt)("h2",{id:"other-output-structures"},"Other output structures"),(0,i.kt)("p",null,"Examples of other structures can be in the ",(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/output_types"},"Output types")," section."),(0,i.kt)("h3",{id:"covariant-effects"},"Covariant effects"),(0,i.kt)("p",null,"Output types in gql are covariant in ",(0,i.kt)("inlineCode",{parentName:"p"},"F"),", such that output types written in different effects seamlessly weave together.\n",(0,i.kt)("inlineCode",{parentName:"p"},"fs2")," provides a type that we can reuse for pure effects defined as ",(0,i.kt)("inlineCode",{parentName:"p"},"type Pure[A] <: Nothing"),"."),(0,i.kt)("p",null,"With this trick, we can define gql types for trivial cases of our 
domain:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'final case class Entity(\n name: String,\n age: Int\n)\n\nobject Entity {\n implicit lazy val gqlType: Type[fs2.Pure, Entity] = tpe[fs2.Pure, Entity](\n "Entity",\n "name" -> lift(_.name),\n "age" -> lift(_.age)\n )\n}\n\ntrait Example\n\ntpe[IO, Example](\n "Example",\n "entity" -> lift(_ => Entity("John Doe", 42))\n)\n')))}m.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[436],{3905:(e,t,n)=>{n.d(t,{Zo:()=>c,kt:()=>d});var a=n(7294);function i(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function r(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);t&&(a=a.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,a)}return n}function l(e){for(var t=1;t=0||(i[n]=e[n]);return i}(e,t);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(i[n]=e[n])}return i}var s=a.createContext({}),p=function(e){var t=a.useContext(s),n=t;return e&&(n="function"==typeof e?e(t):l(l({},t),e)),n},c=function(e){var t=p(e.components);return a.createElement(s.Provider,{value:t},e.children)},m={inlineCode:"code",wrapper:function(e){var t=e.children;return a.createElement(a.Fragment,{},t)}},u=a.forwardRef((function(e,t){var n=e.components,i=e.mdxType,r=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),u=p(n),d=i,f=u["".concat(s,".").concat(d)]||u[d]||m[d]||r;return n?a.createElement(f,l(l({ref:t},c),{},{components:n})):a.createElement(f,l({ref:t},c))}));function d(e,t){var n=arguments,i=t&&t.mdxType;if("string"==typeof e||i){var r=n.length,l=new Array(r);l[0]=u;var o={};for(var s in t)hasOwnProperty.call(t,s)&&(o[s]=t[s]);o.originalType=e,o.mdxType="string"==typeof e?e:i,l[1]=o;for(var p=2;p{n.r(t),n.d(t,{assets:()=>s,contentTitle:()=>l,default:()=>m,frontMatter:()=>r,metadata:()=>o,toc:()=>p});var a=n(7462),i=(n(7294),n(3905));const r={title:"The DSL"},l=void 0,o={unversionedId:"server/schema/dsl",id:"server/schema/dsl",title:"The DSL",description:"gql's dsl is a lightweight set of smart-constructors.",source:"@site/docs/server/schema/dsl.md",sourceDirName:"server/schema",slug:"/server/schema/dsl",permalink:"/gql/docs/server/schema/dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/dsl.md",tags:[],version:"current",frontMatter:{title:"The DSL"},sidebar:"docs",previous:{title:"Input types",permalink:"/gql/docs/server/schema/input_types"},next:{title:"Monadic Resolver DSL",permalink:"/gql/docs/server/schema/arrow_dsl"}},s={},p=[{value:"Fields",id:"fields",level:2},{value:"Builders",id:"builders",level:3},{value:"Value resolution",id:"value-resolution",level:3},{value:"Unification instances",id:"unification-instances",level:2},{value:"Interface inheritance",id:"interface-inheritance",level:3},{value:"Input types",id:"input-types",level:2},{value:"Other output structures",id:"other-output-structures",level:2},{value:"Covariant effects",id:"covariant-effects",level:3}],c={toc:p};function m(e){let{components:t,...n}=e;return(0,i.kt)("wrapper",(0,a.Z)({},c,n,{components:t,mdxType:"MDXLayout"}),(0,i.kt)("p",null,"gql's dsl is a lightweight set of smart-constructors.\nIf you have a particular usecase or even coding style that conflicts with the dsl, you can 
always introduce your own schema definition syntax or build on top of the existing dsl."),(0,i.kt)("p",null,"Lets begin by importing what we need."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"import cats.data._\nimport cats.effect._\nimport cats.implicits._\nimport gql.dsl.all._\nimport gql.ast._\nimport gql.resolver._\n")),(0,i.kt)("h2",{id:"fields"},"Fields"),(0,i.kt)("p",null,"The simplest form of field construction comes from the ",(0,i.kt)("inlineCode",{parentName:"p"},"build.from")," smart constructor.\nIt simply lifts a resolver into a field, given that a gql output type exists for the resolver output."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"def r: Resolver[IO, Int, String] = Resolver.lift(i => i.toString())\n\nval f: Field[IO, Int, String] = build.from(r)\n// f: Field[IO, Int, String] = Field(\n// resolve = gql.resolver.Resolver@7a72eb8a,\n// output = cats.Always@2be93db0,\n// description = None,\n// attributes = List()\n// )\n")),(0,i.kt)("p",null,"Sometimes type inference cannot find the proper type for a field:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build.from(Resolver.liftF(i => IO(i.toString())))\n// error: value liftF is not a member of object gql.resolver.Resolver\n// did you mean lift? or perhaps liftFull?\n// build.from(Resolver.liftF(i => IO(i.toString())))\n// ^^^^^^^^^^^^^^\n")),(0,i.kt)("p",null,"The type parameters for ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," are partially applied, such that when type inference isn't enough, types can be supplied explicitly."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build[IO, Int].from(Resolver.effect(i => IO(i.toString())))\n\nbuild.from(Resolver.effect((i: Int) => IO(i.toString())))\n")),(0,i.kt)("p",null,"For some fields, there is an even more concise syntax.\nInvoking the ",(0,i.kt)("inlineCode",{parentName:"p"},"apply")," method of ",(0,i.kt)("inlineCode",{parentName:"p"},"build"),", takes a higher order function that goes from the identity resolver (",(0,i.kt)("inlineCode",{parentName:"p"},"Resolver[F, A, A]"),") to some output."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"build[IO, Int](_.map(i => i * 2).evalMap(i => IO(i))): Field[IO, Int, Int]\n")),(0,i.kt)("h3",{id:"builders"},"Builders"),(0,i.kt)("p",null,"Complex structures may require many special resolver compositions.\nThe dsl also introduces a something akin to a builder pattern.\nThe ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," function from the previous section, creates a builder that has more constructors than just ",(0,i.kt)("inlineCode",{parentName:"p"},"from")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"apply"),"."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.dsl.FieldBuilder\nval b: FieldBuilder[IO, Int] = build[IO, Int]\n")),(0,i.kt)("p",null,"Often a builder is only relevant within a scope, thus one can end up having many unused builders in scope.\nThe ",(0,i.kt)("inlineCode",{parentName:"p"},"builder")," makes such code more concise:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"builder[IO, Int]{ (fb: FieldBuilder[IO, Int]) =>\n fb\n}\n")),(0,i.kt)("p",null,"The builder dsl contains most of the field related constructors:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'builder[IO, Int]{ fb =>\n fb.tpe(\n 
"Query",\n "answer" -> lift(i => i * 0 + 42),\n "pong" -> fb(_.map(_ => "pong"))\n ): Type[IO, Int]\n \n fb.fields(\n "answer" -> fb.lift(i => i * 0 + 42),\n "ping" -> fb.from(Resolver.lift(_ => "pong"))\n )\n}\n')),(0,i.kt)("h3",{id:"value-resolution"},"Value resolution"),(0,i.kt)("p",null,"Wrapping every field in a ",(0,i.kt)("inlineCode",{parentName:"p"},"build")," smart constructor and then defining the resolver seperately is a bit verbose.\nThere are smart constructors for two common variants of field resolvers, that lift a resolver function directly to a ",(0,i.kt)("inlineCode",{parentName:"p"},"Field"),"."),(0,i.kt)("p",null,"We must decide if the field is pure or effectful:"),(0,i.kt)("admonition",{type:"note"},(0,i.kt)("p",{parentName:"admonition"},"The effect constructor is named ",(0,i.kt)("inlineCode",{parentName:"p"},"eff")," to avoid collisions with cats-effect.")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'final case class Person(\n name: String\n)\n\ntpe[IO, Person](\n "Person",\n "name" -> lift(_.name),\n "nameEffect" -> eff(x => IO(x.name))\n)\n')),(0,i.kt)("p",null,"The ",(0,i.kt)("inlineCode",{parentName:"p"},"lift")," and ",(0,i.kt)("inlineCode",{parentName:"p"},"eff")," constructors can also also be supplied with arguments:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'def familyName = arg[String]("familyName")\n\ntpe[IO, Person](\n "Person",\n "name" -> lift(familyName)(_ + _.name),\n "nameEffect" -> eff(familyName)((f, p) => IO(p.name + f))\n)\n')),(0,i.kt)("h2",{id:"unification-instances"},"Unification instances"),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Union"),"s and ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface"),"s are abstract types that have implementations."),(0,i.kt)("p",null,(0,i.kt)("inlineCode",{parentName:"p"},"Union")," declares it's implementations up-front, like a ",(0,i.kt)("inlineCode",{parentName:"p"},"sealed trait"),".\nHowever, ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface")," implementations are declared on the types that implement the interface, like a ",(0,i.kt)("inlineCode",{parentName:"p"},"trait")," or an ",(0,i.kt)("inlineCode",{parentName:"p"},"abstract class"),"."),(0,i.kt)("p",null,"Before continuing, lets setup the environment."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},"trait Vehicle { \n def name: String\n}\nfinal case class Car(name: String) extends Vehicle\nfinal case class Boat(name: String) extends Vehicle\nfinal case class Truck(name: String) extends Vehicle\n\n")),(0,i.kt)("p",null,"For the ",(0,i.kt)("inlineCode",{parentName:"p"},"Union"),", variants can be declared using the ",(0,i.kt)("inlineCode",{parentName:"p"},"variant")," function, which takes a ",(0,i.kt)("inlineCode",{parentName:"p"},"PartialFunction")," from the unifying type to the implementation."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit def car: Type[IO, Car] = ???\nimplicit def boat: Type[IO, Boat] = ???\nimplicit def truck: Type[IO, Truck] = ???\n\nunion[IO, Vehicle]("Vehicle")\n .variant[Car] { case c: Car => c }\n .variant[Boat] { case b: Boat => b }\n .variant[Truck] { case t: Truck => t }\n')),(0,i.kt)("p",null,"A shorthand function exists, if the type of the variant is a subtype of the unifying type."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'union[IO, Vehicle]("Vehicle")\n .subtype[Car] \n .subtype[Boat] \n 
.subtype[Truck] \n')),(0,i.kt)("p",null,"For an ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface")," the same dsl exists, but is placed on the types that can implement the interface (a ",(0,i.kt)("inlineCode",{parentName:"p"},"Type")," or another ",(0,i.kt)("inlineCode",{parentName:"p"},"Interface"),")."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val vehicle: Interface[IO, Vehicle] = interface[IO, Vehicle](\n "Vehicle",\n "name" -> abst[IO, String]\n)\n\ntpe[IO, Car]("Car", "name" -> lift(_.name))\n .implements[Vehicle]{ case c: Car => c }\n \ntpe[IO, Boat]("Boat", "name" -> lift(_.name))\n .subtypeOf[Vehicle]\n \ntrait OtherVehicle extends Vehicle {\n def weight: Int\n}\n\ninterface[IO, OtherVehicle](\n "OtherVehicle",\n "weight" -> abst[IO, Int],\n // Since OtherVehicle is a subtype of Vehicle\n // we can directly embed the Vehicle fields\n vehicle.abstractFields: _*\n).implements[Vehicle]\n')),(0,i.kt)("h3",{id:"interface-inheritance"},"Interface inheritance"),(0,i.kt)("p",null,"It can be a bit cumbersome to implement an interface's fields every time it is extended.\nInterfaces accept any field type (abstract or concrete) as input.\nThis is convinient since it allows a safe type of inheritance.\nWhen using the ",(0,i.kt)("inlineCode",{parentName:"p"},"subtypeImpl")," function, all possible fields are added to the type."),(0,i.kt)("admonition",{type:"info"},(0,i.kt)("p",{parentName:"admonition"},"gql's inheritance has some implications:"),(0,i.kt)("ul",{parentName:"admonition"},(0,i.kt)("li",{parentName:"ul"},"If you're working an a ",(0,i.kt)("inlineCode",{parentName:"li"},"Type"),", only concrete fields can be inherited."),(0,i.kt)("li",{parentName:"ul"},"If you're working on an ",(0,i.kt)("inlineCode",{parentName:"li"},"Interface"),", all fields, concrete and abstract can be inherited.")),(0,i.kt)("p",{parentName:"admonition"},"gql picks the best field when you inherit from an interface.\nFor two fields with the same name, gql will always pick the concrete field.\nIf both are concrete, it will prioritize the field from the subtype (the type you're working on).")),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'trait Pet {\n def name: String\n def age: Int\n def weight: Double\n}\n\ncase class Dog(name: String, age: Int, weight: Double) extends Pet\n\nimplicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](\n "Pet",\n "name" -> lift(_.name),\n "age" -> lift(_.age),\n "weight" -> lift(_.weight)\n)\n\nlazy val overwirttenName = lift[Dog](_.name)\n\nimplicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](\n "Dog",\n "bark" -> lift(_ => "woof!"),\n "name" -> overwirttenName\n).subtypeImpl[Pet]\n\ndog.fields.map{ case (k, _) => k}.mkString_(", ")\n// res13: String = "bark, name, age, weight"\n\n// The Dog type has it\'s own implementation of the name field\ndog.fields.exists{ case (_, v) => v == overwirttenName }\n// res14: Boolean = true\n')),(0,i.kt)("p",null,"To showcase the inheritance a bit further, consider the following invalid schema."),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](\n "Pet",\n "name" -> lift(_.name),\n "age" -> lift(_.age),\n // Notice that weight is abstract\n "weight" -> abst[IO, Double]\n)\n\nimplicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](\n "Dog",\n "bark" -> lift(_ => "woof!")\n).subtypeImpl[Pet]\n\n// We are missing the weight field\ndog.fields.map{ case (k, _) 
=> k}.mkString_(", ")\n// res15: String = "bark, name, age"\n')),(0,i.kt)("admonition",{type:"tip"},(0,i.kt)("p",{parentName:"admonition"},(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/#validation"},"Schema validation")," will catch such errors.")),(0,i.kt)("h2",{id:"input-types"},"Input types"),(0,i.kt)("p",null,"Review the ",(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/input_types"},"Input types")," section for more information."),(0,i.kt)("h2",{id:"other-output-structures"},"Other output structures"),(0,i.kt)("p",null,"Examples of other structures can be in the ",(0,i.kt)("a",{parentName:"p",href:"/gql/docs/server/schema/output_types"},"Output types")," section."),(0,i.kt)("h3",{id:"covariant-effects"},"Covariant effects"),(0,i.kt)("p",null,"Output types in gql are covariant in ",(0,i.kt)("inlineCode",{parentName:"p"},"F"),", such that output types written in different effects seamlessly weave together.\n",(0,i.kt)("inlineCode",{parentName:"p"},"fs2")," provides a type that we can reuse for pure effects defined as ",(0,i.kt)("inlineCode",{parentName:"p"},"type Pure[A] <: Nothing"),"."),(0,i.kt)("p",null,"With this trick, we can define gql types for trivial cases of our domain:"),(0,i.kt)("pre",null,(0,i.kt)("code",{parentName:"pre",className:"language-scala"},'final case class Entity(\n name: String,\n age: Int\n)\n\nobject Entity {\n implicit lazy val gqlType: Type[fs2.Pure, Entity] = tpe[fs2.Pure, Entity](\n "Entity",\n "name" -> lift(_.name),\n "age" -> lift(_.age)\n )\n}\n\ntrait Example\n\ntpe[IO, Example](\n "Example",\n "entity" -> lift(_ => Entity("John Doe", 42))\n)\n')))}m.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4f169309.9c6adec9.js b/assets/js/4f169309.9c6adec9.js new file mode 100644 index 00000000..ffe1ce3f --- /dev/null +++ b/assets/js/4f169309.9c6adec9.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[381],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},m=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=r,h=c["".concat(s,".").concat(u)]||c[u]||d[u]||o;return t?a.createElement(h,i(i({ref:n},m),{},{components:t})):a.createElement(h,i({ref:n},m))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var o=t.length,i=new Array(o);i[0]=c;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l.mdxType="string"==typeof e?e:r,i[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var 
a=t(7462),r=(t(7294),t(3905));const o={title:"Resolvers"},i=void 0,l={unversionedId:"server/schema/resolvers",id:"server/schema/resolvers",title:"Resolvers",description:"Resolvers are at the core of gql; a resolver Resolver[F, I, O] takes an I and produces an O in effect F.",source:"@site/docs/server/schema/resolvers.md",sourceDirName:"server/schema",slug:"/server/schema/resolvers",permalink:"/gql/docs/server/schema/resolvers",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/resolvers.md",tags:[],version:"current",frontMatter:{title:"Resolvers"},sidebar:"docs",previous:{title:"Monadic Resolver DSL",permalink:"/gql/docs/server/schema/arrow_dsl"},next:{title:"The schema",permalink:"/gql/docs/server/schema/"}},s={},p=[{value:"Resolvers",id:"resolvers",level:2},{value:"Lift",id:"lift",level:3},{value:"Effect",id:"effect",level:3},{value:"Arguments",id:"arguments",level:3},{value:"Meta",id:"meta",level:3},{value:"Errors",id:"errors",level:3},{value:"First",id:"first",level:3},{value:"Batch",id:"batch",level:3},{value:"Batch resolver syntax",id:"batch-resolver-syntax",level:4},{value:"Batchers from elsewhere",id:"batchers-from-elsewhere",level:4},{value:"Inline batch",id:"inline-batch",level:3},{value:"Choice",id:"choice",level:3},{value:"Stream",id:"stream",level:3},{value:"Stream semantics",id:"stream-semantics",level:4},{value:"Steps",id:"steps",level:2}],m={toc:p};function d(e){let{components:n,...t}=e;return(0,r.kt)("wrapper",(0,a.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Resolvers are at the core of gql; a resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]")," takes an ",(0,r.kt)("inlineCode",{parentName:"p"},"I")," and produces an ",(0,r.kt)("inlineCode",{parentName:"p"},"O")," in effect ",(0,r.kt)("inlineCode",{parentName:"p"},"F"),".\nResolvers are embedded in fields and act as continuations.\nWhen gql executes a query it first constructs a tree of continueations from your schema and the supplied GraphQL query."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"s act and compose like functions with combinators such as ",(0,r.kt)("inlineCode",{parentName:"p"},"andThen")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"compose"),"."),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Arrow")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice"),".")),(0,r.kt)("p",null,"Lets start off with some imports:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql._\nimport gql.dsl.all._\nimport gql.resolver._\nimport gql.ast._\nimport cats.effect._\nimport cats.implicits._\nimport cats.data._\n")),(0,r.kt)("h2",{id:"resolvers"},"Resolvers"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," is a collection of high-level combinators that constructs a tree of ",(0,r.kt)("inlineCode",{parentName:"p"},"Step"),"."),(0,r.kt)("admonition",{type:"note"},(0,r.kt)("p",{parentName:"admonition"},"If you are familiar with the relationship between ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Pull"),", then the relationship between ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," should be familiar.")),(0,r.kt)("h3",{id:"lift"},"Lift"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver.lift")," 
lifts a function ",(0,r.kt)("inlineCode",{parentName:"p"},"I => O")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"lift"),"'s method form is ",(0,r.kt)("inlineCode",{parentName:"p"},"map"),", which for any resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]")," produces a new resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O2]")," given a function ",(0,r.kt)("inlineCode",{parentName:"p"},"O => O2"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"val r = Resolver.lift[IO, Int](_.toLong)\n// r: Resolver[IO, Int, Long] = gql.resolver.Resolver@2ddcee48\nr.map(_.toString())\n// res0: Resolver[IO, Int, String] = gql.resolver.Resolver@449ecee8\n")),(0,r.kt)("h3",{id:"effect"},"Effect"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"effect")," like ",(0,r.kt)("inlineCode",{parentName:"p"},"lift")," lifts a function, but instead an effectful one like ",(0,r.kt)("inlineCode",{parentName:"p"},"I => F[O]")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"effect"),"'s method form is ",(0,r.kt)("inlineCode",{parentName:"p"},"evalMap")," (like ",(0,r.kt)("inlineCode",{parentName:"p"},"Resource")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream"),")."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"val r = Resolver.effect[IO, Int](i => IO(i.toLong))\n// r: Resolver[IO, Int, Long] = gql.resolver.Resolver@2ff877c3\nr.evalMap(l => IO(l.toString()))\n// res1: Resolver[[x]IO[x], Int, String] = gql.resolver.Resolver@266a7a55\n")),(0,r.kt)("h3",{id:"arguments"},"Arguments"),(0,r.kt)("p",null,"Arguments in gql are provided through resolvers.\nA resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, A]")," can be constructed from an argument ",(0,r.kt)("inlineCode",{parentName:"p"},"Arg[A]"),", through either ",(0,r.kt)("inlineCode",{parentName:"p"},"argument")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"arg")," in method form."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val ageArg = arg[Int]("age")\nval r = Resolver.argument[IO, Nothing, String](arg[String]("name"))\n// r: Resolver[IO, Nothing, String] = gql.resolver.Resolver@6c8e7e7e\nval r2 = r.arg(ageArg)\n// r2: Resolver[IO, Nothing, (Int, String)] = gql.resolver.Resolver@5ed51489\nr2.map{ case (age, name) => s"$name is $age years old" }\n// res2: Resolver[IO, Nothing, String] = gql.resolver.Resolver@32ff26ff\n')),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Arg")," also has an applicative defined for it, so multi-argument resolution can be simplified to."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.argument[IO, Nothing, (String, Int)](\n (arg[String]("name"), arg[Int]("age")).tupled\n)\n// r: Resolver[IO, Nothing, (String, Int)] = gql.resolver.Resolver@47325a27\nr.map{ case (age, name) => s"$name is $age years old" }\n// res3: Resolver[IO, Nothing, String] = gql.resolver.Resolver@5850b131\n')),(0,r.kt)("h3",{id:"meta"},"Meta"),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"meta")," resolver provides metadata regarding query execution, such as the position of query execution, field aliasing and the provided arguments."),(0,r.kt)("p",null,"It also allows the caller to inspect the query ast such that more exotic operations become possible.\nFor instance, 
arguments can be inspected dynamically."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val a = arg[Int]("age")\nResolver.meta[IO, String].map(meta => meta.astNode.arg(a))\n// res4: Resolver[IO, String, Option[Int]] = gql.resolver.Resolver@2e5787ee\n')),(0,r.kt)("p",null,"The ",(0,r.kt)("a",{parentName:"p",href:"/gql/docs/server/integrations/relational"},"relational")," integration makes heavy use of this feature."),(0,r.kt)("h3",{id:"errors"},"Errors"),(0,r.kt)("p",null,"Errors are reported in ",(0,r.kt)("inlineCode",{parentName:"p"},"cats.data.Ior"),"."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"An ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," is a non-exclusive ",(0,r.kt)("inlineCode",{parentName:"p"},"Either"),".")),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," datatype's left side must be ",(0,r.kt)("inlineCode",{parentName:"p"},"String")," and acts as an optional error that will be present in the query result.\ngql can return both an error and a result for the same path, given that the ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," has both its left and right side defined."),(0,r.kt)("p",null,"Errors are embedded into resolvers via ",(0,r.kt)("inlineCode",{parentName:"p"},"rethrow"),".\nThe extension method ",(0,r.kt)("inlineCode",{parentName:"p"},"rethrow")," is present on any resolver of type ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, Ior[String, O]]"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.lift[IO, Int](i => Ior.Both("I will be in the errors :)", i))\n// r: Resolver[IO, Int, Ior.Both[String, Int]] = gql.resolver.Resolver@65d48405\nr.rethrow\n// res5: Resolver[[A]IO[A], Int, Int] = gql.resolver.Resolver@306219fa\n')),(0,r.kt)("p",null,"We can also use ",(0,r.kt)("inlineCode",{parentName:"p"},"emap")," to map the current value into an ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.id[IO, Int].emap(i => Ior.Both("I will be in the errors :)", i))\n// r: Resolver[IO, Int, Int] = gql.resolver.Resolver@6de6effe\n')),(0,r.kt)("h3",{id:"first"},"First"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," also implements ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," (",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, A, B] => Resolver[F, (A, C), (B, C)]"),"), which can be convenient in situations where one would otherwise have to trace a value through an entire computation."),(0,r.kt)("p",null,"Since a ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," does not form a ",(0,r.kt)("inlineCode",{parentName:"p"},"Monad"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," is necessary to implement non-trivial resolver compositions."),(0,r.kt)("p",null,"For instance, maybe your program contains a general resolver composition that is used in many places, say verifying credentials, but you'd like to trace a value through it without having to keep track of tupling output with input."),(0,r.kt)("p",null,"Assume we'd like to implement a resolver that, when given a person's name, can get a list of the person's friends."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'case class PersonId(value: Int)\n\ncase class Person(id: PersonId, name: String)\n\ndef getFriends(id: PersonId, limit: Int): IO[List[Person]] = ???\n\n
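// stub: look the person up by name in some data store\ndef getPerson(name: String): 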
IO[Person] = ???\n\ndef getPersonResolver = Resolver.effect[IO, String](getPerson)\n\ndef limitResolver = Resolver.argument[IO, Person, Int](arg[Int]("limit"))\n\ndef limitArg = arg[Int]("limit")\ngetPersonResolver\n // \'arg\' tuples the input with the argument value\n .arg(limitArg)\n .evalMap{ case (limit, p) => getFriends(p.id, limit) }\n// res6: Resolver[[x]IO[x], String, List[Person]] = gql.resolver.Resolver@7f9c6d5b\n')),(0,r.kt)("h3",{id:"batch"},"Batch"),(0,r.kt)("p",null,"Like most other GraphQL implementations, gql also supports batching."),(0,r.kt)("p",null,"Unlike most other GraphQL implementations, gql's batching implementation features a global query planner that lets gql delay field execution until it can be paired with another field."),(0,r.kt)("p",null,"Batch declaration and usage occur as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Declare a function ",(0,r.kt)("inlineCode",{parentName:"li"},"Set[K] => F[Map[K, V]]"),"."),(0,r.kt)("li",{parentName:"ul"},"Give this function to gql and get back a ",(0,r.kt)("inlineCode",{parentName:"li"},"Resolver[F, Set[K], Map[K, V]]")," in a ",(0,r.kt)("inlineCode",{parentName:"li"},"State")," monad (for unique id generation)."),(0,r.kt)("li",{parentName:"ul"},"Use this new resolver where you want batching.")),(0,r.kt)("p",null,"And now put into practice:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def getPeopleFromDB(ids: Set[PersonId]): IO[List[Person]] = ???\n\nResolver.batch[IO, PersonId, Person]{ keys => \n getPeopleFromDB(keys).map(_.map(x => x.id -> x).toMap)\n}\n// res7: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@3e292a01\n")),(0,r.kt)("p",null,"Whenever gql sees this resolver in any composition, it will look for similar resolvers during query planning."),(0,r.kt)("p",null,"Note, however, that you should only declare each batch resolver variant ",(0,r.kt)("strong",{parentName:"p"},"once"),"; that is, you should build your schema in ",(0,r.kt)("inlineCode",{parentName:"p"},"State"),".\ngql considers different batch instantiations incompatible regardless of any type information."),(0,r.kt)("p",null,"State has ",(0,r.kt)("inlineCode",{parentName:"p"},"Monad")," (and transitively ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative"),") defined for it, so it composes well.\nHere is an example of multiple batchers:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def b1 = Resolver.batch[IO, Int, Person](_ => ???)\ndef b2 = Resolver.batch[IO, Int, String](_ => ???)\n\n(b1, b2).tupled\n// res8: State[SchemaState[IO], (Resolver[IO, Set[Int], Map[Int, Person]], Resolver[IO, Set[Int], Map[Int, String]])] = cats.data.IndexedStateT@1e2459f7\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Even if your field doesn't benefit from batching, batching can still do duplicate key elimination.")),(0,r.kt)("h4",{id:"batch-resolver-syntax"},"Batch resolver syntax"),(0,r.kt)("p",null,"When a resolver has the very specific form ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, Set[K], Map[K, V]]"),", the gql dsl provides some helper methods.\nFor instance, a batcher may be embedded in a singular context (",(0,r.kt)("inlineCode",{parentName:"p"},"K => V"),").\nHere is a showcase of some of the helper methods:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def pb: Resolver[IO, Set[Int], Map[Int, Person]] = \n 
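// the helpers below adapt this exact shape to other cardinalities\n // Stub 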
implementation\n Resolver.lift(_ => Map.empty)\n\n// None if a key is missing\npb.all[List]\n// res9: Resolver[[A]IO[A], List[Int], List[Option[Person]]] = gql.resolver.Resolver@4467dcd2\n\n// Every key must have an associated value\n// or else raise an error via a custom show-like typeclass\nimplicit lazy val showMissingPersonId =\n ShowMissingKeys.showForKey[Int]("not all people could be found")\npb.traversable[List]\n// res10: Resolver[[A]IO[A], List[Int], List[Person]] = gql.resolver.Resolver@588d87b0\n\n// Maybe there is one value for one key?\npb.opt\n// res11: Resolver[[A]IO[A], Int, Option[Person]] = gql.resolver.Resolver@2f03c782\n\n// Same as opt\npb.all[cats.Id]\n// res12: Resolver[[A]IO[A], cats.package.Id[Int], cats.package.Id[Option[Person]]] = gql.resolver.Resolver@13d4c874\n\n// There is always one value for one key\npb.one\n// res13: Resolver[[A]IO[A], Int, Person] = gql.resolver.Resolver@69af0790\n\n// You can be more explicit via the `batch` method\npb.batch.all[NonEmptyList]\n// res14: Resolver[[A]IO[A], NonEmptyList[Int], NonEmptyList[Option[Person]]] = gql.resolver.Resolver@5dabad12\n')),(0,r.kt)("p",null,"Using ",(0,r.kt)("inlineCode",{parentName:"p"},"batch")," helps the compiler produce better error messages:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Resolver.lift[IO, Int](_.toString()).batch.all\n// error: Cannot prove that Set[K] =:= Int.\n// Resolver.lift[IO, Int](_.toString()).batch.all\n// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"For larger programs, consider declaring all your batchers up-front and putting them into some type of collection:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"case class MyBatchers(\n personBatcher: Resolver[IO, Set[Int], Map[Int, Person]],\n intStringBatcher: Resolver[IO, Set[Int], Map[Int, String]]\n)\n\n(b1, b2).mapN(MyBatchers.apply)\n// res16: State[SchemaState[IO], MyBatchers] = cats.data.IndexedStateT@43b8091b\n")),(0,r.kt)("p",{parentName:"admonition"},"For most batchers it is likely that you will eventually want to pre-compose them in various ways, for instance requesting args, which this pattern promotes.")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Sometimes you have multiple groups of fields in the same object where each group has different performance overheads."),(0,r.kt)("p",{parentName:"admonition"},"Say you had a ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," object in your database.\nThis ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," object also exists in a remote api.\nThis remote api can tell you the friends of a ",(0,r.kt)("inlineCode",{parentName:"p"},"Person"),", given the object's id and name.\nWritten out a bit more structurally, we have:"),(0,r.kt)("ul",{parentName:"admonition"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonId")," (identity)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonDB")," (database query)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonDB => PersonRemoteAPI")," (remote api call)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonRemoteAPI")," (composition of database query and remote api call)")),(0,r.kt)("p",{parentName:"admonition"},"And now put into 
code:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'// We have a trivial id field for our person id\ndef pureFields = fields[IO, PersonId](\n "id" -> lift(id => id)\n)\n\n// If we query our database with a person id, we get a person database object\ncase class PersonDB(\n id: PersonId, \n name: String, \n remoteApiId: String\n)\n\n// SELECT id, name, remote_api_id FROM person WHERE id in (...)\ndef dbBatchResolver: Resolver[IO, PersonId, PersonDB] = ???\n\n// From the db we can get the name and the remote api id\ndef dbFields = fields[IO, PersonDB](\n "name" -> lift(_.name),\n "apiId" -> lift(_.remoteApiId)\n)\n\n// The remote api data can be found given the result of a db query\ncase class PersonRemoteAPI(\n id: PersonId, \n friends: List[PersonId]\n)\n\n// Given a PersonDB we can call the api (via a batched GET or something)\ndef personBatchResolver: Resolver[IO, PersonDB, PersonRemoteAPI] = ???\n\n// We can get the friends from the remote api\ndef remoteApiFields = fields[IO, PersonRemoteAPI](\n "friends" -> lift(_.friends)\n)\n\n// Now we can start composing our fields\n// We can align the types of the db and remote api data to the PersonDB type\n// by composing the remote api resolver on the remote api fields\ndef dbFields2: Fields[IO, PersonDB] = \n remoteApiFields.compose(personBatchResolver) ::: dbFields\n\n// Given a PersonId we have every field\n// If "friends" is selected, gql will first run `dbBatchResolver` and then `personBatchResolver`\ndef allFields = dbFields2.compose(dbBatchResolver) ::: pureFields\n\nimplicit def person: Type[IO, PersonId] = tpeNel[IO, PersonId](\n "Person",\n allFields\n)\n')),(0,r.kt)("p",{parentName:"admonition"},"The general pattern for this decomposition revolves around figuring out what the most basic description of your object is.\nIn this example, every fields can (eventually through various side-effects) be resolved from just ",(0,r.kt)("inlineCode",{parentName:"p"},"PersonId"),".")),(0,r.kt)("h4",{id:"batchers-from-elsewhere"},"Batchers from elsewhere"),(0,r.kt)("p",null,"Most batching implementations have compatible signatures and can be adapted into a gql batcher."),(0,r.kt)("p",null,"For instance, converting ",(0,r.kt)("inlineCode",{parentName:"p"},"fetch")," to gql:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import fetch._\nobject People extends Data[PersonId, Person] {\n def name = "People"\n\n def source: DataSource[IO, PersonId, Person] = ???\n}\n\nResolver\n .batch[IO, PersonId, Person](_.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty)))\n// res17: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@1b284b1b\n')),(0,r.kt)("h3",{id:"inline-batch"},"Inline batch"),(0,r.kt)("p",null,"A batch resolver can also be defined inline with some notable differences to the regular batch resolver:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"It does not need to be defined in state."),(0,r.kt)("li",{parentName:"ul"},"It is not subject to global query planning, and is only ever called with inputs from the same selection.")),(0,r.kt)("p",null,"The inline batch resolver has the same signature as a regular batch resolver; ",(0,r.kt)("inlineCode",{parentName:"p"},"Set[K] => F[Map[K, V]]"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Resolver.inlineBatch[IO, PersonId, Person](\n 
_.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty))\n)\n// res18: Resolver[IO, Set[PersonId], Map[PersonId, Person]] = gql.resolver.Resolver@64d638e3\n")),(0,r.kt)("h3",{id:"choice"},"Choice"),(0,r.kt)("p",null,"Resolvers also implement ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice")," via ",(0,r.kt)("inlineCode",{parentName:"p"},"(Resolver[F, A, C], Resolver[F, B, D]) => Resolver[F, Either[A, B], Either[C, D]]"),".\nOn the surface, this combinator may have limited uses, but with a bit of composition we can perform tasks such as caching."),(0,r.kt)("p",null,"For instance, a combinator derived from ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"skippable: Resolver[F, I, O] => Resolver[F, Either[I, O], O]"),', which acts as a variant of "caching".\nIf the right side is present, we skip the underlying resolver (',(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),") altogether."),(0,r.kt)("p",null,"For any resolver in the form ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, Either[L, R]]"),", we can modify the left side with ",(0,r.kt)("inlineCode",{parentName:"p"},"leftThrough")," and the right with ",(0,r.kt)("inlineCode",{parentName:"p"},"rightThrough"),"."),(0,r.kt)("p",null,"For instance, we can implement caching:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def getPersonForId(id: PersonId): IO[Person] = ???\n\ntype CachedPerson = Either[PersonId, Person]\ndef cachedPerson = tpe[IO, CachedPerson](\n "Person",\n "id" -> lift(_.map(_.id).merge.value),\n // We\'ll align the left and right side of the choice and then merge the `Either`\n "name" -> build[IO, CachedPerson](_.leftThrough(_.evalMap(getPersonForId)).map(_.merge.name))\n)\n')),(0,r.kt)("p",null,"We can also use some of the ",(0,r.kt)("inlineCode",{parentName:"p"},"compose")," tricks from the ",(0,r.kt)("a",{parentName:"p",href:"#batch-resolver-syntax"},"batch resolver syntax section")," if we have a lot of fields that depend on ",(0,r.kt)("inlineCode",{parentName:"p"},"Person"),". 
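"),(0,r.kt)("p",null,"As a sketch (assuming the ",(0,r.kt)("inlineCode",{parentName:"p"},"skippable")," combinator described above is available in method form), the cache-or-fetch step can be factored out once and reused by many such fields:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"// skippable: Resolver[F, I, O] => Resolver[F, Either[I, O], O]\n// an already-cached Person skips the effectful fetch entirely\ndef resolveCached: Resolver[IO, CachedPerson, Person] =\n  Resolver.effect[IO, PersonId](getPersonForId).skippable\n")),(0,r.kt)("p",null,"Fields written against ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," can then be composed onto ",(0,r.kt)("inlineCode",{parentName:"p"},"resolveCached"),", mirroring the decomposition shown earlier. 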
"),(0,r.kt)("admonition",{type:"note"},(0,r.kt)("p",{parentName:"admonition"},"The query planner treats the choice branches as parallel, such that for two instances of a choice, resolvers in the two branches may be batched together.")),(0,r.kt)("h3",{id:"stream"},"Stream"),(0,r.kt)("p",null,"The stream resolver embeds an ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream")," and provides the ability to emit a stream of results for a graphql subscription."),(0,r.kt)("h4",{id:"stream-semantics"},"Stream semantics"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"When one or more streams emit, the interpreter will re-evaluate the query from the position that emitted.\nThat is, only the sub-tree that changed will be re-interpreted."),(0,r.kt)("li",{parentName:"ul"},"If two streams emit and one occurs as a child of the other, the child will be ignored since it will be replaced."),(0,r.kt)("li",{parentName:"ul"},"By default, the interpreter will only respect the most-recent emitted data.")),(0,r.kt)("p",null,"This means that by default, gql assumes that your stream should behave like a signal, not sequentially.\nHowever, gql can also adhere sequential semantics."),(0,r.kt)("p",null,"For instance a schema designed like the following, emits incremental updates regarding the price for some symbol:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-graphql"},"type PriceChange {\n difference: Float!\n}\n\ntype Subscription {\n priceChanges(symbolId: ID!): PriceChange!\n}\n")),(0,r.kt)("p",null,"And here is a schema that represents an api that emits updates regarding the current price of a symbol:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-graphql"},"type SymbolState {\n price: Float!\n}\n\ntype Subscription {\n price(symbolId: ID!): SymbolState!\n}\n")),(0,r.kt)("p",null,"Consider the following example where two different evaluation semantics are displayed:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"case class PriceChange(difference: Float)\ndef priceChanges(symbolId: String): fs2.Stream[IO, PriceChange] = ???\n\ncase class SymbolState(price: Float)\ndef price(symbolId: String): fs2.Stream[IO, SymbolState] = ???\n\ndef priceChangesResolver = Resolver.id[IO, String].sequentialStreamMap(priceChanges)\n\ndef priceResolver = Resolver.id[IO, String].streamMap(price)\n")),(0,r.kt)("p",null,"If your stream is sequential, gql will only pull elements when they are needed."),(0,r.kt)("p",null,"The interpreter performs a global re-interpretation of your schema, when one or more streams emit.\nThat is, the interpreter cycles through the following two phases:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Interpret for the current values."),(0,r.kt)("li",{parentName:"ul"},"Await new values (and values that arrived during the previous step).")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Since gql is free to ignore updates when a stream is a signal, one should prefer ",(0,r.kt)("inlineCode",{parentName:"p"},"evalMap")," on a ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," instead of a stream if possible.")),(0,r.kt)("admonition",{type:"warning"},(0,r.kt)("p",{parentName:"admonition"},"For a given stream it must hold all child resources alive (maybe the child resources are also streams that may emit).\nAs such, for a given stream, gql must await a next element from the stream before releasing any currently held resources sub-tree.\nThis means that gql must be able to 
pull one element before closing the old one.")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"If you have streams of updates where you are only interested in that something changed (",(0,r.kt)("inlineCode",{parentName:"p"},"Stream[F, Unit]"),") there may be room for significant optimization.\nIn ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2")," you can merge streams with combinators such as ",(0,r.kt)("inlineCode",{parentName:"p"},"parJoin"),", but they have to assume that there may be resources to account for.\nIf you are discarding the output of the stream or you are absolutely sure that the output does not depend on a resource lifetime,\none can write more optimized versions functions for this purpose."),(0,r.kt)("details",null,(0,r.kt)("summary",null,"Some examples of potentially more performant implementations"),(0,r.kt)("p",{parentName:"admonition"},"In a crude benchmarks, these combinators may perform an order of magnitude faster than ",(0,r.kt)("inlineCode",{parentName:"p"},"parJoin")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"merge"),"."),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import fs2.{Pipe, Stream}\nimport fs2.concurrent._\ndef parListen[A]: Pipe[IO, Stream[IO, A], Unit] =\n streams =>\n for {\n d <- Stream.eval(IO.deferred[Either[Throwable, Unit]])\n c <- Stream.eval(IO.deferred[Unit])\n sigRef <- Stream.eval(SignallingRef[IO, Unit](()))\n\n bg = streams.flatMap { sub =>\n Stream.supervise {\n sub\n .evalMap(_ => sigRef.set(()))\n .compile\n .drain\n .onError(e => d.complete(Left(e)).void)\n .onCancel(c.complete(()).void)\n }.void\n }\n\n listenCancel = (c.get *> IO.canceled).as(Right(()): Either[Throwable, Unit])\n fg = sigRef.discrete.interruptWhen(d).interruptWhen(listenCancel)\n\n _ <- fg.concurrently(bg)\n } yield ()\n\ndef parListenSignal[A]: Pipe[IO, Stream[IO, A], A] =\n streams =>\n Stream.eval(SignallingRef.of[IO, Option[A]](None)).flatMap { sig =>\n sig.discrete.unNone.concurrently {\n streams.parEvalMapUnorderedUnbounded { x =>\n x.evalMap(x => sig.set(Some(x))).compile.drain\n }\n }\n }\n")))),(0,r.kt)("p",null,"Here is an example of some streams in action:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import scala.concurrent.duration._\nimport cats.effect.unsafe.implicits.global\n\ncase class Streamed(value: Int)\n\nimplicit lazy val streamed: Type[IO, Streamed] = tpe[IO, Streamed](\n "Streamed",\n "value" -> build[IO, Streamed](_.streamMap{ s =>\n fs2.Stream\n .bracket(IO(println(s"allocating $s")))(_ => IO(println(s"releasing $s"))) >>\n fs2.Stream\n .iterate(0)(_ + 1)\n .evalTap(n => IO(println(s"emitting $n for $s")))\n .meteredStartImmediately(((5 - s.value) * 20).millis)\n .as(Streamed(s.value + 1))\n })\n)\n\ndef query = """\n subscription {\n streamed {\n value {\n value { \n value {\n __typename\n }\n }\n }\n }\n }\n"""\n\ndef schema = SchemaShape.unit[IO](\n fields("ping" -> lift(_ => "pong")),\n subscription = Some(fields("streamed" -> lift(_ => Streamed(0))))\n)\n\nSchema.simple(schema)\n .map(Compiler[IO].compile(_, query))\n .flatMap { case Right(Application.Subscription(stream)) => stream.take(4).compile.drain }\n .unsafeRunSync()\n// allocating Streamed(0)\n// emitting 0 for Streamed(0)\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// emitting 1 for Streamed(2)\n// emitting 1 for Streamed(1)\n// emitting 1 for Streamed(0)\n// allocating 
Streamed(2)\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// emitting 0 for Streamed(2)\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// emitting 2 for Streamed(2)\n// emitting 2 for Streamed(1)\n// emitting 1 for Streamed(2)\n// allocating Streamed(2)\n// emitting 1 for Streamed(2)\n// emitting 0 for Streamed(2)\n// emitting 2 for Streamed(0)\n// releasing Streamed(1)\n// emitting 3 for Streamed(2)\n// releasing Streamed(2)\n// releasing Streamed(2)\n// releasing Streamed(0)\n// releasing Streamed(2)\n// releasing Streamed(2)\n// releasing Streamed(1)\n')),(0,r.kt)("p",null,"gql also allows the user to specify how much time the interpreter may await more stream updates:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Schema.simple(schema).map(Compiler[IO].compile(_, query, accumulate=Some(10.millis)))\n")),(0,r.kt)("p",null,"furthermore, gql can also emit interpreter information if you want to look into what gql is doing:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Schema.simple(schema)\n .map(Compiler[IO].compile(_, query, debug=gql.server.interpreter.DebugPrinter[IO](s => IO(println(s)))))\n .flatMap { case Right(Application.Subscription(stream)) => stream.take(3).compile.drain }\n .unsafeRunSync()\n// allocating Streamed(0)\n// emitting 0 for Streamed(0)\n// publishing at index 0 at root.streamed.value\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// publishing at index 0 at root.streamed.value.value\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// publishing at index 0 at root.streamed.value.value.value\n// unconsing with current tree:\n// |- unknown-cats.effect.kernel.Unique$Token@5085ad30\n// got state, awaiting a non-empty state (publication)\n// emitting 1 for Streamed(2)\n// publishing at index 1 at root.streamed.value.value.value\n// done publishing at index 1 at root.streamed.value.value.value, await? true\n// got non-empty state, awaiting 5 milliseconds\n// unconsed:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = StreamData(\n// cont = Continuation.Done(\n// Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = __typename,\n// alias = None,\n// cont = PreparedCont(\n// edges = Lift(...),\n// cont = PreparedLeaf(String)\n// )\n// )\n// }\n// )\n// )\n// ),\n// value = Right(repl.MdocSession$MdocApp$Streamed$1)\n// )\n// )\n// ]\n// emitting 1 for Streamed(1)\n// publishing at index 1 at root.streamed.value.value\n// done publishing at index 1 at root.streamed.value.value, await? true\n// unconsed after removing old children:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = ditto\n// )\n// ]\n// tree after unconsing:\n// |- unknown-cats.effect.kernel.Unique$Token@5085ad30\n// emitting 1 elements from uncons\n// interpreting for 1 inputs\n// done interpreting\n// unconsing with current tree:\n// |- unknown-cats.effect.kernel.Unique$Token@5085ad30\n// got state, awaiting a non-empty state (publication)\n// got non-empty state, awaiting 5 milliseconds\n// emitting 1 for Streamed(0)\n// publishing at index 1 at root.streamed.value\n// done publishing at index 1 at root.streamed.value, await? 
true\n// unconsed:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = StreamData(\n// cont = Continuation.Done(\n// Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = value,\n// alias = None,\n// cont = PreparedCont(\n// edges = Compose(\n// left = Compose(left = Lift(...), right = Lift(...)),\n// right = EmbedStream(signal = true)\n// ),\n// cont = Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = __typename,\n// alias = None,\n// cont = PreparedCont(\n// edges = Lift(...),\n// cont = PreparedLeaf(String)\n// )\n// )\n// }\n// )\n// )\n// )\n// )\n// }\n// )\n// )\n// ),\n// value = Right(repl.MdocSession$MdocApp$Streamed$1)\n// )\n// )\n// ]\n// unconsed after removing old children:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = ditto\n// )\n// ]\n// tree after unconsing:\n// |- unknown-cats.effect.kernel.Unique$Token@5085ad30\n// emitting 1 elements from uncons\n// interpreting for 1 inputs\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// publishing at index 0 at root.streamed.value.value.value\n// done interpreting\n// releasing Streamed(0)\n// releasing Streamed(1)\n// releasing Streamed(2)\n// releasing Streamed(2)\n")),(0,r.kt)("h2",{id:"steps"},"Steps"),(0,r.kt)("p",null,"A ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," is the low-level algebra for a resolver, that describes a single step of evaluation for a query.\nThe variants of ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," are clearly listed in the source code. 
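"),(0,r.kt)("p",null,"As a rough illustration (using step names that appear in the debug output above; not a complete listing of the variants):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"val r = Resolver.lift[IO, Int](_.toLong).map(_.toString)\n// corresponds, roughly, to the step tree:\n// Compose(left = Lift(...), right = Lift(...))\n// and streamMap introduces an EmbedStream step, as seen above\n")),(0,r.kt)("p",null,"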
All variants of step provide orthogonal properties."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/4f169309.ad8bcbf1.js b/assets/js/4f169309.ad8bcbf1.js deleted file mode 100644 index 8ad241a7..00000000 --- a/assets/js/4f169309.ad8bcbf1.js +++ /dev/null @@ -1 +0,0 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[381],{3905:(e,n,t)=>{t.d(n,{Zo:()=>m,kt:()=>u});var a=t(7294);function r(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function o(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);n&&(a=a.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,a)}return t}function i(e){for(var n=1;n=0||(r[t]=e[t]);return r}(e,n);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(a=0;a=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(r[t]=e[t])}return r}var s=a.createContext({}),p=function(e){var n=a.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},m=function(e){var n=p(e.components);return a.createElement(s.Provider,{value:n},e.children)},d={inlineCode:"code",wrapper:function(e){var n=e.children;return a.createElement(a.Fragment,{},n)}},c=a.forwardRef((function(e,n){var t=e.components,r=e.mdxType,o=e.originalType,s=e.parentName,m=l(e,["components","mdxType","originalType","parentName"]),c=p(t),u=r,h=c["".concat(s,".").concat(u)]||c[u]||d[u]||o;return t?a.createElement(h,i(i({ref:n},m),{},{components:t})):a.createElement(h,i({ref:n},m))}));function u(e,n){var t=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var o=t.length,i=new Array(o);i[0]=c;var l={};for(var s in n)hasOwnProperty.call(n,s)&&(l[s]=n[s]);l.originalType=e,l.mdxType="string"==typeof e?e:r,i[1]=l;for(var p=2;p{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>d,frontMatter:()=>o,metadata:()=>l,toc:()=>p});var a=t(7462),r=(t(7294),t(3905));const o={title:"Resolvers"},i=void 0,l={unversionedId:"server/schema/resolvers",id:"server/schema/resolvers",title:"Resolvers",description:"Resolvers are at the core of gql; a resolver Resolver[F, I, O] takes an I and produces an O in effect F.",source:"@site/docs/server/schema/resolvers.md",sourceDirName:"server/schema",slug:"/server/schema/resolvers",permalink:"/gql/docs/server/schema/resolvers",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/resolvers.md",tags:[],version:"current",frontMatter:{title:"Resolvers"},sidebar:"docs",previous:{title:"Monadic Resolver DSL",permalink:"/gql/docs/server/schema/arrow_dsl"},next:{title:"The schema",permalink:"/gql/docs/server/schema/"}},s={},p=[{value:"Resolvers",id:"resolvers",level:2},{value:"Lift",id:"lift",level:3},{value:"Effect",id:"effect",level:3},{value:"Arguments",id:"arguments",level:3},{value:"Meta",id:"meta",level:3},{value:"Errors",id:"errors",level:3},{value:"First",id:"first",level:3},{value:"Batch",id:"batch",level:3},{value:"Batch resolver syntax",id:"batch-resolver-syntax",level:4},{value:"Batchers from elsewhere",id:"batchers-from-elsewhere",level:4},{value:"Inline batch",id:"inline-batch",level:3},{value:"Choice",id:"choice",level:3},{value:"Stream",id:"stream",level:3},{value:"Stream semantics",id:"stream-semantics",level:4},{value:"Steps",id:"steps",level:2}],m={toc:p};function d(e){let{components:n,...t}=e;return(0,r.kt)("wrapper",(0,a.Z)({},m,t,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Resolvers are at the core of gql; a 
resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]")," takes an ",(0,r.kt)("inlineCode",{parentName:"p"},"I")," and produces an ",(0,r.kt)("inlineCode",{parentName:"p"},"O")," in effect ",(0,r.kt)("inlineCode",{parentName:"p"},"F"),".\nResolvers are embedded in fields and act as continuations.\nWhen gql executes a query it first constructs a tree of continueations from your schema and the supplied GraphQL query."),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"s act and compose like functions with combinators such as ",(0,r.kt)("inlineCode",{parentName:"p"},"andThen")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"compose"),"."),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Arrow")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice"),".")),(0,r.kt)("p",null,"Lets start off with some imports:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql._\nimport gql.dsl.all._\nimport gql.resolver._\nimport gql.ast._\nimport cats.effect._\nimport cats.implicits._\nimport cats.data._\n")),(0,r.kt)("h2",{id:"resolvers"},"Resolvers"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," is a collection of high-level combinators that constructs a tree of ",(0,r.kt)("inlineCode",{parentName:"p"},"Step"),"."),(0,r.kt)("admonition",{type:"note"},(0,r.kt)("p",{parentName:"admonition"},"If you are familiar with the relationship between ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Pull"),", then the relationship between ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," should be familiar.")),(0,r.kt)("h3",{id:"lift"},"Lift"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver.lift")," lifts a function ",(0,r.kt)("inlineCode",{parentName:"p"},"I => O")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"lift"),"'s method form is ",(0,r.kt)("inlineCode",{parentName:"p"},"map"),", which for any resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]")," produces a new resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O2]")," given a function ",(0,r.kt)("inlineCode",{parentName:"p"},"O => O2"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"val r = Resolver.lift[IO, Int](_.toLong)\n// r: Resolver[IO, Int, Long] = gql.resolver.Resolver@490ce5f5\nr.map(_.toString())\n// res0: Resolver[IO, Int, String] = gql.resolver.Resolver@b69376a\n")),(0,r.kt)("h3",{id:"effect"},"Effect"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"effect")," like ",(0,r.kt)("inlineCode",{parentName:"p"},"lift")," lifts a function, but instead an effectful one like ",(0,r.kt)("inlineCode",{parentName:"p"},"I => F[O]")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"effect"),"'s method form is ",(0,r.kt)("inlineCode",{parentName:"p"},"evalMap")," (like ",(0,r.kt)("inlineCode",{parentName:"p"},"Resource")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream"),")."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"val r = Resolver.effect[IO, Int](i => IO(i.toLong))\n// r: Resolver[IO, Int, Long] = 
gql.resolver.Resolver@7475236f\nr.evalMap(l => IO(l.toString()))\n// res1: Resolver[[x]IO[x], Int, String] = gql.resolver.Resolver@16a819c9\n")),(0,r.kt)("h3",{id:"arguments"},"Arguments"),(0,r.kt)("p",null,"Arguments in gql are provided through resolvers.\nA resolver ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, A]")," can be constructed from an argument ",(0,r.kt)("inlineCode",{parentName:"p"},"Arg[A]"),", through either ",(0,r.kt)("inlineCode",{parentName:"p"},"argument")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"arg")," in method form."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val ageArg = arg[Int]("age")\nval r = Resolver.argument[IO, Nothing, String](arg[String]("name"))\n// r: Resolver[IO, Nothing, String] = gql.resolver.Resolver@38a575a4\nval r2 = r.arg(ageArg)\n// r2: Resolver[IO, Nothing, (Int, String)] = gql.resolver.Resolver@7ab5c7bd\nr2.map{ case (age, name) => s"$name is $age years old" }\n// res2: Resolver[IO, Nothing, String] = gql.resolver.Resolver@2786eddf\n')),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Arg")," also has an applicative defined for it, so multi-argument resolution can be simplified to."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.argument[IO, Nothing, (String, Int)](\n (arg[String]("name"), arg[Int]("age")).tupled\n)\n// r: Resolver[IO, Nothing, (String, Int)] = gql.resolver.Resolver@71b6ceb7\nr.map{ case (age, name) => s"$name is $age years old" }\n// res3: Resolver[IO, Nothing, String] = gql.resolver.Resolver@62ad8d70\n')),(0,r.kt)("h3",{id:"meta"},"Meta"),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"meta")," resolver provides metadata regarding query execution, such as the position of query execution, field aliasing and the provided arguments."),(0,r.kt)("p",null,"It also allows the caller to inspect the query ast such that more exotic operations become possible.\nFor instance, arguments can dynamically be inspected."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val a = arg[Int]("age")\nResolver.meta[IO, String].map(meta => meta.astNode.arg(a))\n// res4: Resolver[IO, String, Option[Int]] = gql.resolver.Resolver@367136b7\n')),(0,r.kt)("p",null,"The ",(0,r.kt)("a",{parentName:"p",href:"/gql/docs/server/integrations/relational"},"relational")," integration makes heavy use of this feature."),(0,r.kt)("h3",{id:"errors"},"Errors"),(0,r.kt)("p",null,"Errors are reported in ",(0,r.kt)("inlineCode",{parentName:"p"},"cats.data.Ior"),"."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"An ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," is a non-exclusive ",(0,r.kt)("inlineCode",{parentName:"p"},"Either"),".")),(0,r.kt)("p",null,"The ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," datatype's left side must be ",(0,r.kt)("inlineCode",{parentName:"p"},"String")," and acts as an optional error that will be present in the query result.\ngql can return an error and a result for the same path, given that ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior")," has both it's left and right side defined."),(0,r.kt)("p",null,"Errors are embedded into resolvers via ",(0,r.kt)("inlineCode",{parentName:"p"},"rethrow"),".\nThe extension method ",(0,r.kt)("inlineCode",{parentName:"p"},"rethrow")," is present on any resolver of type ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, Ior[String, 
O]]"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.lift[IO, Int](i => Ior.Both("I will be in the errors :)", i))\n// r: Resolver[IO, Int, Ior.Both[String, Int]] = gql.resolver.Resolver@2609ddb3\nr.rethrow\n// res5: Resolver[[A]IO[A], Int, Int] = gql.resolver.Resolver@618d6cb3\n')),(0,r.kt)("p",null,"We can also use ",(0,r.kt)("inlineCode",{parentName:"p"},"emap")," to map the current value into an ",(0,r.kt)("inlineCode",{parentName:"p"},"Ior"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val r = Resolver.id[IO, Int].emap(i => Ior.Both("I will be in the errors :)", i))\n// r: Resolver[IO, Int, Int] = gql.resolver.Resolver@6fc5abff\n')),(0,r.kt)("h3",{id:"first"},"First"),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," also implements ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," (",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, A, B] => Resolver[F, (A, C), (B, C)]"),") which can be convinient for situations where one would usually have to trace a value through an entire computation."),(0,r.kt)("p",null,"Since a ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," does not form a ",(0,r.kt)("inlineCode",{parentName:"p"},"Monad"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," is necessary to implement non-trivial resolver compositions."),(0,r.kt)("p",null,"For instance, maybe your program contains a general resolver compositon that is used many places, like say verifying credentials, but you'd like to trace a value through it without having to keep track of tupling output with input."),(0,r.kt)("p",null,"Assume we'd like to implement a resolver, that when given a person's name, can get a list of the person's friends."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'case class PersonId(value: Int)\n\ncase class Person(id: PersonId, name: String)\n\ndef getFriends(id: PersonId, limit: Int): IO[List[Person]] = ???\n\ndef getPerson(name: String): IO[Person] = ???\n\ndef getPersonResolver = Resolver.effect[IO, String](getPerson)\n\ndef limitResolver = Resolver.argument[IO, Person, Int](arg[Int]("limit"))\n\ndef limitArg = arg[Int]("limit")\ngetPersonResolver\n // \'arg\' tuples the input with the argument value\n .arg(limitArg)\n .evalMap{ case (limit, p) => getFriends(p.id, limit) }\n// res6: Resolver[[x]IO[x], String, List[Person]] = gql.resolver.Resolver@503efc3e\n')),(0,r.kt)("h3",{id:"batch"},"Batch"),(0,r.kt)("p",null,"Like most other GraphQL implementations, gql also supports batching."),(0,r.kt)("p",null,"Unlike most other GraphQL implementations, gql's batching implementation features a global query planner that lets gql delay field execution until it can be paired with another field."),(0,r.kt)("p",null,"Batch declaration and usage occurs as follows:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Declare a function ",(0,r.kt)("inlineCode",{parentName:"li"},"Set[K] => F[Map[K, V]]"),"."),(0,r.kt)("li",{parentName:"ul"},"Give this function to gql and get back a ",(0,r.kt)("inlineCode",{parentName:"li"},"Resolver[F, Set[K], Map[K, V]]")," in a ",(0,r.kt)("inlineCode",{parentName:"li"},"State")," monad (for unique id generation)."),(0,r.kt)("li",{parentName:"ul"},"Use this new resolver where you want batching.")),(0,r.kt)("p",null,"And now put into practice:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def getPeopleFromDB(ids: Set[PersonId]): IO[List[Person]] = 
???\n\nResolver.batch[IO, PersonId, Person]{ keys => \n getPeopleFromDB(keys).map(_.map(x => x.id -> x).toMap)\n}\n// res7: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@38b247cf\n")),(0,r.kt)("p",null,"Whenever gql sees this resolver in any composition, it will look for similar resolvers during query planning."),(0,r.kt)("p",null,"Note, however, that you should only declare each batch resolver variant ",(0,r.kt)("strong",{parentName:"p"},"once"),", that is, you should build your schema in ",(0,r.kt)("inlineCode",{parentName:"p"},"State"),".\ngql consideres different batch instantiations incompatible regardless of any type information."),(0,r.kt)("p",null,"State has ",(0,r.kt)("inlineCode",{parentName:"p"},"Monad")," (and transitively ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative"),") defined for it, so it composes well.\nHere is an example of multiple batchers:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def b1 = Resolver.batch[IO, Int, Person](_ => ???)\ndef b2 = Resolver.batch[IO, Int, String](_ => ???)\n\n(b1, b2).tupled\n// res8: State[SchemaState[IO], (Resolver[IO, Set[Int], Map[Int, Person]], Resolver[IO, Set[Int], Map[Int, String]])] = cats.data.IndexedStateT@32504c9e\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Even if your field doesn't benefit from batching, batching can still do duplicate key elimination.")),(0,r.kt)("h4",{id:"batch-resolver-syntax"},"Batch resolver syntax"),(0,r.kt)("p",null,"When a resolver in a very specific form ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, Set[K], Map[K, V]]"),", then the gql dsl provides some helper methods.\nFor instance, a batcher may be embedded in a singular context (",(0,r.kt)("inlineCode",{parentName:"p"},"K => V"),").\nHere is a showcase of some of the helper methods:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def pb: Resolver[IO, Set[Int], Map[Int, Person]] = \n // Stub implementation\n Resolver.lift(_ => Map.empty)\n\n// None if a key is missing\npb.all[List]\n// res9: Resolver[[A]IO[A], List[Int], List[Option[Person]]] = gql.resolver.Resolver@70089378\n\n// Every key must have an associated value\n// or else raise an error via a custom show-like typeclass\nimplicit lazy val showMissingPersonId =\n ShowMissingKeys.showForKey[Int]("not all people could be found")\npb.traversable[List]\n// res10: Resolver[[A]IO[A], List[Int], List[Person]] = gql.resolver.Resolver@57f04e4b\n\n// Maybe there is one value for one key?\npb.opt\n// res11: Resolver[[A]IO[A], Int, Option[Person]] = gql.resolver.Resolver@7d56f745\n\n// Same as opt\npb.all[cats.Id]\n// res12: Resolver[[A]IO[A], cats.package.Id[Int], cats.package.Id[Option[Person]]] = gql.resolver.Resolver@4d5270cd\n\n// There is always one value for one key\npb.one\n// res13: Resolver[[A]IO[A], Int, Person] = gql.resolver.Resolver@79f5b94c\n\n// You can be more explicit via the `batch` method\npb.batch.all[NonEmptyList]\n// res14: Resolver[[A]IO[A], NonEmptyList[Int], NonEmptyList[Option[Person]]] = gql.resolver.Resolver@2ca2222f\n')),(0,r.kt)("p",null,"Using ",(0,r.kt)("inlineCode",{parentName:"p"},"batch")," aids with better compiler error messages."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Resolver.lift[IO, Int](_.toString()).batch.all\n// error: Cannot prove that Set[K] =:= Int.\n// Resolver.lift[IO, Int](_.toString()).batch.all\n// 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"For larger programs, consider declaring all your batchers up-front and putting them into some type of collection:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"case class MyBatchers(\n personBatcher: Resolver[IO, Set[Int], Map[Int, Person]],\n intStringBatcher: Resolver[IO, Set[Int], Map[Int, String]]\n)\n\n(b1, b2).mapN(MyBatchers.apply)\n// res16: State[SchemaState[IO], MyBatchers] = cats.data.IndexedStateT@386b52ad\n")),(0,r.kt)("p",{parentName:"admonition"},"For most batchers it is likely that you eventually want to pre-compose them in various ways, for instance requsting args, which this pattern promotes.")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Sometimes you have multiple groups of fields in the same object where each group have different performance overheads."),(0,r.kt)("p",{parentName:"admonition"},"Say you had a ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," object in your database.\nThis ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," object also exists in a remote api.\nThis remote api can tell you, the friends of a ",(0,r.kt)("inlineCode",{parentName:"p"},"Person")," given the object's id and name.\nWritten out a bit more structured we have that:"),(0,r.kt)("ul",{parentName:"admonition"},(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonId")," (identity)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonDB")," (database query)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonDB => PersonRemoteAPI")," (remote api call)"),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"PersonId => PersonRemoteAPI")," (composition of database query and remote api call)")),(0,r.kt)("p",{parentName:"admonition"},"And now put into code:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'// We have a trivial id field for our person id\ndef pureFields = fields[IO, PersonId](\n "id" -> lift(id => id)\n)\n\n// If we query our database with a person id, we get a person database object\ncase class PersonDB(\n id: PersonId, \n name: String, \n remoteApiId: String\n)\n\n// SELECT id, name, remote_api_id FROM person WHERE id in (...)\ndef dbBatchResolver: Resolver[IO, PersonId, PersonDB] = ???\n\n// From the db we can get the name and the remote api id\ndef dbFields = fields[IO, PersonDB](\n "name" -> lift(_.name),\n "apiId" -> lift(_.remoteApiId)\n)\n\n// The remote api data can be found given the result of a db query\ncase class PersonRemoteAPI(\n id: PersonId, \n friends: List[PersonId]\n)\n\n// Given a PersonDB we can call the api (via a batched GET or something)\ndef personBatchResolver: Resolver[IO, PersonDB, PersonRemoteAPI] = ???\n\n// We can get the friends from the remote api\ndef remoteApiFields = fields[IO, PersonRemoteAPI](\n "friends" -> lift(_.friends)\n)\n\n// Now we can start composing our fields\n// We can align the types of the db and remote api data to the PersonDB type\n// by composing the remote api resolver on the remote api fields\ndef dbFields2: Fields[IO, PersonDB] = \n remoteApiFields.compose(personBatchResolver) ::: dbFields\n\n// Given a PersonId we have every field\n// If "friends" is selected, gql will first run `dbBatchResolver` and then 
`personBatchResolver`\ndef allFields = dbFields2.compose(dbBatchResolver) ::: pureFields\n\nimplicit def person: Type[IO, PersonId] = tpeNel[IO, PersonId](\n "Person",\n allFields\n)\n')),(0,r.kt)("p",{parentName:"admonition"},"The general pattern for this decomposition revolves around figuring out what the most basic description of your object is.\nIn this example, every fields can (eventually through various side-effects) be resolved from just ",(0,r.kt)("inlineCode",{parentName:"p"},"PersonId"),".")),(0,r.kt)("h4",{id:"batchers-from-elsewhere"},"Batchers from elsewhere"),(0,r.kt)("p",null,"Most batching implementations have compatible signatures and can be adapted into a gql batcher."),(0,r.kt)("p",null,"For instance, converting ",(0,r.kt)("inlineCode",{parentName:"p"},"fetch")," to gql:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import fetch._\nobject People extends Data[PersonId, Person] {\n def name = "People"\n\n def source: DataSource[IO, PersonId, Person] = ???\n}\n\nResolver\n .batch[IO, PersonId, Person](_.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty)))\n// res17: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@799b07a8\n')),(0,r.kt)("h3",{id:"inline-batch"},"Inline batch"),(0,r.kt)("p",null,"A batch resolver can also be defined inline with some notable differences to the regular batch resolver:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"It does not need to be defined in state."),(0,r.kt)("li",{parentName:"ul"},"It is not subject to global query planning, and is only ever called with inputs from the same selection.")),(0,r.kt)("p",null,"The inline batch resolver has the same signature as a regular batch resolver; ",(0,r.kt)("inlineCode",{parentName:"p"},"Set[K] => F[Map[K, V]]"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Resolver.inlineBatch[IO, PersonId, Person](\n _.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty))\n)\n// res18: Resolver[IO, Set[PersonId], Map[PersonId, Person]] = gql.resolver.Resolver@73fe0342\n")),(0,r.kt)("h3",{id:"choice"},"Choice"),(0,r.kt)("p",null,"Resolvers also implement ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice")," via ",(0,r.kt)("inlineCode",{parentName:"p"},"(Resolver[F, A, C], Resolver[F, B, D]) => Resolver[F, Either[A, B], Either[C, D]]"),".\nOn the surface, this combinator may have limited uses, but with a bit of composition we can perform tasks such as caching."),(0,r.kt)("p",null,"For instance, a combinator derived from ",(0,r.kt)("inlineCode",{parentName:"p"},"Choice")," is ",(0,r.kt)("inlineCode",{parentName:"p"},"skippable: Resolver[F, I, O] => Resolver[F, Either[I, O], O]"),', which acts as a variant of "caching".\nIf the right side is present we skip the underlying resolver (',(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, O]"),") altogether."),(0,r.kt)("p",null,"For any resolver in the form ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver[F, I, Either[L, R]]")," we modify the left side with ",(0,r.kt)("inlineCode",{parentName:"p"},"leftThrough")," and the right with ",(0,r.kt)("inlineCode",{parentName:"p"},"rightThrough"),"."),(0,r.kt)("p",null,"For Instance we can implement caching."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def getPersonForId(id: PersonId): IO[Person] = ???\n\ntype CachedPerson = Either[PersonId, Person]\ndef cachedPerson = tpe[IO, CachedPerson](\n "Person",\n "id" -> 
lift(_.map(_.id).merge.value),\n // We\'ll align the left and right side of the choice and then merge the `Either`\n "name" -> build[IO, CachedPerson](_.leftThrough(_.evalMap(getPersonForId)).map(_.merge.name))\n)\n')),(0,r.kt)("p",null,"We can also use some of the ",(0,r.kt)("inlineCode",{parentName:"p"},"compose")," tricks from the ",(0,r.kt)("a",{parentName:"p",href:"#batch-resolver-syntax"},"batch resolver syntax section")," if we have a lot of fields that depend on ",(0,r.kt)("inlineCode",{parentName:"p"},"Person"),". "),(0,r.kt)("admonition",{type:"note"},(0,r.kt)("p",{parentName:"admonition"},"The query planner treats the choice branches as parallel, such that for two instances of a choice, resolvers in the two branches may be batched together.")),(0,r.kt)("h3",{id:"stream"},"Stream"),(0,r.kt)("p",null,"The stream resolver embeds an ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2.Stream")," and provides the ability to emit a stream of results for a graphql subscription."),(0,r.kt)("h4",{id:"stream-semantics"},"Stream semantics"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"When one or more streams emit, the interpreter will re-evaluate the query from the position that emitted.\nThat is, only the sub-tree that changed will be re-interpreted."),(0,r.kt)("li",{parentName:"ul"},"If two streams emit and one occurs as a child of the other, the child will be ignored since it will be replaced."),(0,r.kt)("li",{parentName:"ul"},"By default, the interpreter will only respect the most-recent emitted data.")),(0,r.kt)("p",null,"This means that by default, gql assumes that your stream should behave like a signal, not sequentially.\nHowever, gql can also adhere sequential semantics."),(0,r.kt)("p",null,"For instance a schema designed like the following, emits incremental updates regarding the price for some symbol:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-graphql"},"type PriceChange {\n difference: Float!\n}\n\ntype Subscription {\n priceChanges(symbolId: ID!): PriceChange!\n}\n")),(0,r.kt)("p",null,"And here is a schema that represents an api that emits updates regarding the current price of a symbol:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-graphql"},"type SymbolState {\n price: Float!\n}\n\ntype Subscription {\n price(symbolId: ID!): SymbolState!\n}\n")),(0,r.kt)("p",null,"Consider the following example where two different evaluation semantics are displayed:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"case class PriceChange(difference: Float)\ndef priceChanges(symbolId: String): fs2.Stream[IO, PriceChange] = ???\n\ncase class SymbolState(price: Float)\ndef price(symbolId: String): fs2.Stream[IO, SymbolState] = ???\n\ndef priceChangesResolver = Resolver.id[IO, String].sequentialStreamMap(priceChanges)\n\ndef priceResolver = Resolver.id[IO, String].streamMap(price)\n")),(0,r.kt)("p",null,"If your stream is sequential, gql will only pull elements when they are needed."),(0,r.kt)("p",null,"The interpreter performs a global re-interpretation of your schema, when one or more streams emit.\nThat is, the interpreter cycles through the following two phases:"),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},"Interpret for the current values."),(0,r.kt)("li",{parentName:"ul"},"Await new values (and values that arrived during the previous step).")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"Since gql is free to ignore updates when a stream is a signal, one 
should prefer ",(0,r.kt)("inlineCode",{parentName:"p"},"evalMap")," on a ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," instead of a stream if possible.")),(0,r.kt)("admonition",{type:"warning"},(0,r.kt)("p",{parentName:"admonition"},"For a given stream it must hold all child resources alive (maybe the child resources are also streams that may emit).\nAs such, for a given stream, gql must await a next element from the stream before releasing any currently held resources sub-tree.\nThis means that gql must be able to pull one element before closing the old one.")),(0,r.kt)("admonition",{type:"tip"},(0,r.kt)("p",{parentName:"admonition"},"If you have streams of updates where you are only interested in that something changed (",(0,r.kt)("inlineCode",{parentName:"p"},"Stream[F, Unit]"),") there may be room for significant optimization.\nIn ",(0,r.kt)("inlineCode",{parentName:"p"},"fs2")," you can merge streams with combinators such as ",(0,r.kt)("inlineCode",{parentName:"p"},"parJoin"),", but they have to assume that there may be resources to account for.\nIf you are discarding the output of the stream or you are absolutely sure that the output does not depend on a resource lifetime,\none can write more optimized versions functions for this purpose."),(0,r.kt)("details",null,(0,r.kt)("summary",null,"Some examples of potentially more performant implementations"),(0,r.kt)("p",{parentName:"admonition"},"In a crude benchmarks, these combinators may perform an order of magnitude faster than ",(0,r.kt)("inlineCode",{parentName:"p"},"parJoin")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"merge"),"."),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import fs2.{Pipe, Stream}\nimport fs2.concurrent._\ndef parListen[A]: Pipe[IO, Stream[IO, A], Unit] =\n streams =>\n for {\n d <- Stream.eval(IO.deferred[Either[Throwable, Unit]])\n c <- Stream.eval(IO.deferred[Unit])\n sigRef <- Stream.eval(SignallingRef[IO, Unit](()))\n\n bg = streams.flatMap { sub =>\n Stream.supervise {\n sub\n .evalMap(_ => sigRef.set(()))\n .compile\n .drain\n .onError(e => d.complete(Left(e)).void)\n .onCancel(c.complete(()).void)\n }.void\n }\n\n listenCancel = (c.get *> IO.canceled).as(Right(()): Either[Throwable, Unit])\n fg = sigRef.discrete.interruptWhen(d).interruptWhen(listenCancel)\n\n _ <- fg.concurrently(bg)\n } yield ()\n\ndef parListenSignal[A]: Pipe[IO, Stream[IO, A], A] =\n streams =>\n Stream.eval(SignallingRef.of[IO, Option[A]](None)).flatMap { sig =>\n sig.discrete.unNone.concurrently {\n streams.parEvalMapUnorderedUnbounded { x =>\n x.evalMap(x => sig.set(Some(x))).compile.drain\n }\n }\n }\n")))),(0,r.kt)("p",null,"Here is an example of some streams in action:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import scala.concurrent.duration._\nimport cats.effect.unsafe.implicits.global\n\ncase class Streamed(value: Int)\n\nimplicit lazy val streamed: Type[IO, Streamed] = tpe[IO, Streamed](\n "Streamed",\n "value" -> build[IO, Streamed](_.streamMap{ s =>\n fs2.Stream\n .bracket(IO(println(s"allocating $s")))(_ => IO(println(s"releasing $s"))) >>\n fs2.Stream\n .iterate(0)(_ + 1)\n .evalTap(n => IO(println(s"emitting $n for $s")))\n .meteredStartImmediately(((5 - s.value) * 20).millis)\n .as(Streamed(s.value + 1))\n })\n)\n\ndef query = """\n subscription {\n streamed {\n value {\n value { \n value {\n __typename\n }\n }\n }\n }\n }\n"""\n\ndef schema = SchemaShape.unit[IO](\n fields("ping" -> lift(_ => "pong")),\n 
subscription = Some(fields("streamed" -> lift(_ => Streamed(0))))\n)\n\nSchema.simple(schema)\n .map(Compiler[IO].compile(_, query))\n .flatMap { case Right(Application.Subscription(stream)) => stream.take(4).compile.drain }\n .unsafeRunSync()\n// allocating Streamed(0)\n// emitting 0 for Streamed(0)\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// emitting 1 for Streamed(2)\n// emitting 1 for Streamed(1)\n// emitting 1 for Streamed(0)\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// emitting 2 for Streamed(2)\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// releasing Streamed(1)\n// releasing Streamed(2)\n// emitting 2 for Streamed(1)\n// emitting 1 for Streamed(2)\n// releasing Streamed(0)\n// releasing Streamed(2)\n// releasing Streamed(2)\n// releasing Streamed(1)\n')),(0,r.kt)("p",null,"gql also allows the user to specify how much time the interpreter may await more stream updates:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Schema.simple(schema).map(Compiler[IO].compile(_, query, accumulate=Some(10.millis)))\n")),(0,r.kt)("p",null,"furthermore, gql can also emit interpreter information if you want to look into what gql is doing:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"Schema.simple(schema)\n .map(Compiler[IO].compile(_, query, debug=gql.server.interpreter.DebugPrinter[IO](s => IO(println(s)))))\n .flatMap { case Right(Application.Subscription(stream)) => stream.take(3).compile.drain }\n .unsafeRunSync()\n// allocating Streamed(0)\n// emitting 0 for Streamed(0)\n// publishing at index 0 at root.streamed.value\n// allocating Streamed(1)\n// emitting 0 for Streamed(1)\n// publishing at index 0 at root.streamed.value.value\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// publishing at index 0 at root.streamed.value.value.value\n// unconsing with current tree:\n// |- unknown-cats.effect.kernel.Unique$Token@5dc85f7a\n// got state, awaiting a non-empty state (publication)\n// emitting 1 for Streamed(2)\n// publishing at index 1 at root.streamed.value.value.value\n// done publishing at index 1 at root.streamed.value.value.value, await? true\n// got non-empty state, awaiting 5 milliseconds\n// unconsed:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = StreamData(\n// cont = Continuation.Done(\n// Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = __typename,\n// alias = None,\n// cont = PreparedCont(\n// edges = Lift(...),\n// cont = PreparedLeaf(String)\n// )\n// )\n// }\n// )\n// )\n// ),\n// value = Right(repl.MdocSession$MdocApp$Streamed$1)\n// )\n// )\n// ]\n// emitting 1 for Streamed(1)\n// unconsed after removing old children:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = ditto\n// )\n// ]\n// tree after unconsing:\n// |- unknown-cats.effect.kernel.Unique$Token@5dc85f7a\n// publishing at index 1 at root.streamed.value.value\n// done publishing at index 1 at root.streamed.value.value, await? 
true\n// emitting 1 elements from uncons\n// interpreting for 1 inputs\n// done interpreting\n// unconsing with current tree:\n// |- unknown-cats.effect.kernel.Unique$Token@5dc85f7a\n// got state, awaiting a non-empty state (publication)\n// got non-empty state, awaiting 5 milliseconds\n// emitting 1 for Streamed(0)\n// publishing at index 1 at root.streamed.value\n// done publishing at index 1 at root.streamed.value, await? true\n// unconsed:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = StreamData(\n// cont = Continuation.Done(\n// Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = value,\n// alias = None,\n// cont = PreparedCont(\n// edges = Compose(\n// left = Compose(left = Lift(...), right = Lift(...)),\n// right = EmbedStream(signal = true)\n// ),\n// cont = Selection(\n// PreparedSpecification(\n// typename = Streamed,\n// selections = PreparedSelections{\n// PreparedDataField(\n// name = __typename,\n// alias = None,\n// cont = PreparedCont(\n// edges = Lift(...),\n// cont = PreparedLeaf(String)\n// )\n// )\n// }\n// )\n// )\n// )\n// )\n// }\n// )\n// )\n// ),\n// value = Right(repl.MdocSession$MdocApp$Streamed$1)\n// )\n// )\n// ]\n// unconsed after removing old children:\n// [\n// ResourceInfo(\n// parentName = root.streamed.value.value (signal = true),\n// name = resource-1,\n// open = true,\n// value = ditto\n// )\n// ]\n// tree after unconsing:\n// |- unknown-cats.effect.kernel.Unique$Token@5dc85f7a\n// emitting 1 elements from uncons\n// interpreting for 1 inputs\n// allocating Streamed(2)\n// emitting 0 for Streamed(2)\n// publishing at index 0 at root.streamed.value.value.value\n// done interpreting\n// releasing Streamed(1)\n// releasing Streamed(2)\n// releasing Streamed(0)\n// releasing Streamed(2)\n")),(0,r.kt)("h2",{id:"steps"},"Steps"),(0,r.kt)("p",null,"A ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," is the low-level algebra for a resolver, that describes a single step of evaluation for a query.\nThe variants of ",(0,r.kt)("inlineCode",{parentName:"p"},"Step")," are clearly listed in the source code. 
All variants of step provide orthogonal properties."))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/62af8b26.18142116.js b/assets/js/62af8b26.1bedbcd2.js similarity index 85% rename from assets/js/62af8b26.18142116.js rename to assets/js/62af8b26.1bedbcd2.js index 0e44dfbe..57f62937 100644 --- a/assets/js/62af8b26.18142116.js +++ b/assets/js/62af8b26.1bedbcd2.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[508],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>m});var t=a(7294);function o(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(o[a]=e[a]);return o}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,o=e.mdxType,l=e.originalType,s=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),d=p(a),m=o,g=d["".concat(s,".").concat(m)]||d[m]||u[m]||l;return a?t.createElement(g,i(i({ref:n},c),{},{components:a})):t.createElement(g,i({ref:n},c))}));function m(e,n){var a=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var l=a.length,i=new Array(l);i[0]=d;var r={};for(var s in n)hasOwnProperty.call(n,s)&&(r[s]=n[s]);r.originalType=e,r.mdxType="string"==typeof e?e:o,i[1]=r;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>l,metadata:()=>r,toc:()=>p});var t=a(7462),o=(a(7294),a(3905));const l={title:"Relational"},i=void 0,r={unversionedId:"server/integrations/relational",id:"server/integrations/relational",title:"Relational",description:"This integration is fairly new and sofisticated so it can be subject to change.",source:"@site/docs/server/integrations/relational.md",sourceDirName:"server/integrations",slug:"/server/integrations/relational",permalink:"/gql/docs/server/integrations/relational",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/integrations/relational.md",tags:[],version:"current",frontMatter:{title:"Relational"},sidebar:"docs",previous:{title:"Global object identification",permalink:"/gql/docs/server/integrations/goi"},next:{title:"Query DSL",permalink:"/gql/docs/client/dsl"}},s={},p=[{value:"Skunk example",id:"skunk-example",level:2},{value:"Simplifying relationships",id:"simplifying-relationships",level:3},{value:"Runtime semantics",id:"runtime-semantics",level:2},{value:"Implementing your own integration",id:"implementing-your-own-integration",level:2},{value:"Adding arguments",id:"adding-arguments",level:2},{value:"Sum types",id:"sum-types",level:2},{value:"Declaring complex subqueries",id:"declaring-complex-subqueries",level:2},{value:"Using relational without tables",id:"using-relational-without-tables",level:2},{value:"Running transactions",id:"running-transactions",level:2},{value:"Handling N+1",id:"handling-n1",level:2}],c={toc:p};function 
u(e){let{components:n,...a}=e;return(0,o.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("admonition",{type:"caution"},(0,o.kt)("p",{parentName:"admonition"},"This integration is fairly new and sofisticated so it can be subject to change.")),(0,o.kt)("p",null,"gql also comes with an optional integration for relational databases."),(0,o.kt)("p",null,"The relational integration is library agnostic and is based on query fragments that can be composed into a full query."),(0,o.kt)("p",null,"The relational module ships with two implementations, one for ",(0,o.kt)("inlineCode",{parentName:"p"},"skunk")," and another for ",(0,o.kt)("inlineCode",{parentName:"p"},"doobie"),".\nThey can be found in the ",(0,o.kt)("a",{parentName:"p",href:"../../overview/modules"},"modules")," section."),(0,o.kt)("admonition",{type:"tip"},(0,o.kt)("p",{parentName:"admonition"},"Integrating a new library requires very little code.\nThe skunk integration only spans 18 lines of code.")),(0,o.kt)("h2",{id:"skunk-example"},"Skunk example"),(0,o.kt)("p",null,"For this example we will use ",(0,o.kt)("inlineCode",{parentName:"p"},"skunk"),".\nWe will start off with some imports."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"import skunk._\nimport skunk.codec.all._\nimport skunk.implicits._\nimport gql.ast._\nimport gql.dsl.all._\nimport gql.relational._\nimport gql.relational.skunk.dsl._\nimport gql.relational.skunk.dsl.algebra.QueryContext\nimport cats._\nimport cats.data._\nimport cats.arrow._\nimport cats.effect._\nimport cats.implicits._\n")),(0,o.kt)("p",null,"Before we start declaring fragments, we need to define our domain."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"final case class Home(name: String, address: String)\n// many homes belong to many people\nfinal case class Person(name: String, age: Int)\n// a pet has one owner\nfinal case class Pet(name: String, age: Int, owner: Int)\n")),(0,o.kt)("p",null,"The realtional module also ships with a dsl that makes declaration use conscise.\nWe will start off just declaring the home table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class HomeTable(\n // When a table is queried it must have an alias\n alias: String\n) extends SkunkTable {\n // Note that we use only skunk tools to declare the contents of this structure\n\n // We can declare how this table is referenced in sql (or some other query language)\n def table = void"home"\n\n // The SkunkTable trait gives some convinience methods for declaring columns\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (addressCol, address) = sel("address", text)\n\n // The projection that uniquely identifies a row in the table\n def tableKey = id\n}\n// We get some methods if show how given an alias we can get a table\nval homeTable = skunkTable(HomeTable)\n')),(0,o.kt)("p",null,"We will also need to declare the other two tables, this time with less comments."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class PersonTable(alias: String) extends SkunkTable {\n def table = void"person"\n\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\nval personTable = skunkTable(PersonTable)\n\ncase class PetTable(alias: String) extends SkunkTable {\n def table = void"pet"\n\n val (idCol, id) = sel("id", int4)\n val (nameCol, 
name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n val (ownerCol, owner) = sel("owner", int4)\n\n def tableKey = id\n}\nval petTable = skunkTable(PetTable)\n')),(0,o.kt)("p",null,"Since ",(0,o.kt)("inlineCode",{parentName:"p"},"Home")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Person")," have a many to many relationship, we will have to go through another table table to get the relationship."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class HomePersonTable(alias: String) extends SkunkTable {\n def table = void"home_person"\n\n val (homeCol, home) = sel("home_id", int4)\n val (personCol, person) = sel("person_id", int4)\n\n def tableKey = (home, person).tupled\n}\nval homePersonTable = skunkTable(HomePersonTable)\n')),(0,o.kt)("p",null,"Now we can start declaring our graphql schema."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pet: Type[IO, QueryContext[PetTable]] = \n tpe[IO, QueryContext[PetTable]](\n "PetTable",\n "name" -> query(_.name), // query is a method that compiles to a projection in the query language (sql)\n "age" -> query(_.age)\n )\n\nimplicit lazy val person: Type[IO, QueryContext[PersonTable]] = \n tpe[IO, QueryContext[PersonTable]](\n "PersonTable",\n "name" -> query(_.name),\n "age" -> query(_.age),\n "pets" -> cont{ person => // cont is a continuation that will create a new table from the current one\n // The join method takes a type parameter that declares the multiplicity of the join\n // If no type parameter is given, the join is assumed to be one to one\n petTable.join[List]{ pet =>\n // Given an instance of the pet table, we can declare a join predicate\n sql"${pet.ownerCol} = ${person.idCol}"\n }\n }\n )\n\nimplicit lazy val home: Type[IO, QueryContext[HomeTable]] = \n tpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "name" -> query(_.name),\n "address" -> query(_.address),\n "caption" -> query(h => (h.name, h.address).mapN(_ + " at " + _)), // projections form an applicative\n "people" -> cont{ home =>\n // Tables can be flatmapped together\n for {\n hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n p <- personTable.join(p => sql"${hp.personCol} = ${p.idCol}")\n } yield p\n }\n )\n')),(0,o.kt)("p",null,"Now we are done declaring our schema."),(0,o.kt)("p",null,"Before querying it we will need our database up and running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.effect.unsafe.implicits.global\nimport natchez.noop._ // needed for skunk connection\nimplicit val trace: natchez.Trace[IO] = NoopTrace[IO]()\n\ndef connection = Session.single[IO](\n host = "127.0.0.1",\n port = 5432,\n user = "postgres",\n database = "postgres"\n)\n')),(0,o.kt)("details",null,(0,o.kt)("summary",null,"We will also need to create our tables and insert some data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'connection.use{ ses =>\n val queries = List(\n sql"drop table if exists pet",\n sql"drop table if exists home_person",\n sql"drop table if exists person",\n sql"drop table if exists home",\n sql"""create table home_person (\n home_id int not null,\n person_id int not null\n )""",\n sql"""create table pet (\n id int4 primary key,\n name text not null,\n age int not null,\n owner int not null\n )""",\n sql"""create table person (\n id int4 primary key,\n name text not null,\n age int not null\n )""",\n sql"""create table home (\n id int4 primary 
key,\n name text not null,\n address text not null\n )""",\n sql"""insert into home (id, name, address) values (1, \'Doe Home\', \'123 Main St\')""",\n sql"""insert into person (id, name, age) values (1, \'John Doe\', 42)""",\n sql"""insert into person (id, name, age) values (2, \'Jane Doe\', 40)""",\n sql"""insert into home_person (home_id, person_id) values (1, 1)""", \n sql"""insert into home_person (home_id, person_id) values (1, 2)""",\n sql"""insert into pet (id, name, age, owner) values (1, \'Fluffy\', 2, 1)""",\n )\n\n queries.traverse(x => ses.execute(x.command))\n}.unsafeRunSync()\n// res0: List[..skunk.data.Completion] = List(\n// DropTable,\n// DropTable,\n// DropTable,\n// DropTable,\n// CreateTable,\n// CreateTable,\n// CreateTable,\n// CreateTable,\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1)\n// )\n'))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "homes" -> runFieldSingle(connection) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n)\n\ndef q = """\nquery {\n homes {\n name\n address\n caption\n people {\n name\n age\n pets {\n name\n age\n }\n }\n }\n}\n"""\n\nimport io.circe.syntax._\nimport gql.{Compiler, Application}\nschema\n .map(Compiler[IO].compile(_, q))\n .flatMap { case Right(Application.Query(run)) => run.map(_.handleErrors{e => println(e.getMessage()); ""}.asJson.spaces2) }\n .unsafeRunSync()\n// res1: String = """{\n// "data" : {\n// "homes" : [\n// {\n// "address" : "123 Main St",\n// "caption" : "Doe Home at 123 Main St",\n// "name" : "Doe Home",\n// "people" : [\n// {\n// "age" : 42,\n// "name" : "John Doe",\n// "pets" : [\n// {\n// "age" : 2,\n// "name" : "Fluffy"\n// }\n// ]\n// },\n// {\n// "age" : 40,\n// "name" : "Jane Doe",\n// "pets" : [\n// ]\n// }\n// ]\n// }\n// ]\n// }\n// }"""\n')),(0,o.kt)("p",null,"And thats it!"),(0,o.kt)("p",null,"Just for fun, we check out the generated sql."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.relational.skunk._\nimplicit def logQueries[F[_]: MonadCancelThrow]: SkunkIntegration.Queryable[F] = \n new SkunkIntegration.Queryable[F] {\n def apply[A](\n query: AppliedFragment,\n decoder: Decoder[A], \n connection: SkunkIntegration.Connection[F]\n ): F[List[A]] = {\n println(query.fragment.sql)\n SkunkIntegration.skunkQueryable[F].apply(query, decoder, connection)\n }\n}\n\ndef schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "homes" -> runFieldSingle(connection) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n)\n\nschema\n .map(Compiler[IO].compile(_, q))\n .flatMap { case Right(Application.Query(run)) => run.void }\n .unsafeRunSync()\n// select t1.id, t1.address, t1.name, t1.address, t1.name, t2.home_id, t2.person_id, t3.id, t3.age, t3.name, t4.id, t4.age, t4.name\n// from home as t1\n// left join home_person as t2 on t1.id = t2.home_id\n// left join person as t3 on t2.person_id = t3.id\n// left join pet as t4 on t4.owner = t3.id\n// where true\n')),(0,o.kt)("h3",{id:"simplifying-relationships"},"Simplifying relationships"),(0,o.kt)("p",null,"The join between ",(0,o.kt)("inlineCode",{parentName:"p"},"home")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"person")," can be a bit daunting, since you have to keep track of multiplicity yourself.\nInstead we can use the database to handle some of the multiplicity for us by generalizing the person 
table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class SharedPersonTable(alias: String, table: AppliedFragment) extends SkunkTable {\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\n\nval sharedPersonTable = skunkTable(SharedPersonTable(_, void"person"))\n\nval homePersonQuery = void"(select * from home_person inner join person on home_person.person_id = person.id)"\nval sharedHomePersonTable = skunkTable(SharedPersonTable(_, homePersonQuery))\n\n// And now using our subquery we can simplify the join.\nimplicit lazy val person: Type[IO, QueryContext[SharedPersonTable]] = ???\n\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "name" -> query(_.name),\n "address" -> query(_.address),\n "caption" -> query(h => (h.name, h.address).mapN(_ + " at " + _)), // projections form an applicative\n "people" -> cont{ h => \n sharedHomePersonTable.join[List](hp => sql"${h.idCol} = ${hp.aliased(sql"home_id")}")\n }\n)\n')),(0,o.kt)("h2",{id:"runtime-semantics"},"Runtime semantics"),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"This section is a technical reference, and not necessary to use the library.")),(0,o.kt)("p",null,"Data emitted by SQL is not hierarchical, but instead flat; for it to map well to graphql, which is hierarchical some work must be performed.\nMost use-cases are covered by simply invoking the ",(0,o.kt)("inlineCode",{parentName:"p"},"join")," method with the proper multiplicity parameter."),(0,o.kt)("p",null,"When your AST is inspected to build a query, a recursive AST walk composes a big reassociation function that can translate flat query results into the proper hierarchical structure.\nThis composed function also tracks the visited columns and their decoders."),(0,o.kt)("p",null,"The query algebra has a special operation that lets the caller modify the state however they wish.\nThe dsl uses this state modification for various tasks, such as providing a convinient ",(0,o.kt)("inlineCode",{parentName:"p"},"join")," method that both joins a table and performs the proper reassociation of results.\nConsider the following example that joins a table more explicitly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"val q1 = for {\n ht <- homeTable.simpleJoin(_ => void\"true\")\n _ <- reassociate[List](ht.tableKey)\n // some other reassociation criteria\n _ <- reassociate[Option](select(int4, void\"42\"))\n} yield ht\n// q1: algebra.Query[[X]List[Option[X]], HomeTable] = FlatMap(\n// fa = FlatMap(\n// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@40921696)),\n// f = gql.relational.QueryDsl$$Lambda$13992/0x0000000803808040@3d61fd2\n// ),\n// f = \n// )\n\n// we can perform reassociation before performing the actions in 'q1'\nval q2 = reassociate[Option](select(text, void\"'john doe'\")).flatMap(_ => q1)\n// q2: algebra.Query[[X]Option[List[Option[X]]], HomeTable] = FlatMap(\n// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@37334eb5)),\n// f = \n// )\n\n// we can also change the result structure after performing the actions in 'q2'\nq2.mapK[List](new (\u03bb[X => Option[List[Option[X]]]] ~> List) {\n def apply[A](fa: Option[List[Option[A]]]): List[A] = fa.toList.flatten.flatMap(_.toList)\n})\n// res4: algebra.Query[List, HomeTable] = LiftEffect(\n// fa = EitherT(value = cats.data.IndexedStateT@6d3f78d)\n// )\n")),(0,o.kt)("p",null,"Accessing the lowlevel 
state also lets the user perform other tasks such as unique id (new alias) generation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"for {\n alias1 <- newAlias\n alias2 <- newAlias\n} yield ()\n// res5: algebra.Query[[X]X, Unit] = FlatMap(\n// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@56a57767)),\n// f = \n// )\n")),(0,o.kt)("h2",{id:"implementing-your-own-integration"},"Implementing your own integration"),(0,o.kt)("p",null,"The entire dsl and query compiler is available if you implement a couple of methods."),(0,o.kt)("p",null,"Here is the full skunk integration."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import _root_.{skunk => sk}\nobject MyIntegration extends QueryAlgebra {\n // What is a fragment\n type Frag = sk.AppliedFragment\n // How do we transform a string into a fragment\n def stringToFrag(s: String): Frag = sql"#${s}".apply(Void)\n // Combine and create empty fragments\n implicit def appliedFragmentMonoid: Monoid[Frag] = sk.AppliedFragment.MonoidAppFragment\n // How do we decode values\n type Decoder[A] = sk.Decoder[A]\n // How can we combine decoders\n implicit def applicativeForDecoder: Applicative[Decoder] = Decoder.ApplicativeDecoder\n // How do we make an optional decoder\n def optDecoder[A](d: Decoder[A]): Decoder[Option[A]] = d.opt\n // What is needed to perform a query\n type Connection[F[_]] = Resource[F, Session[F]]\n // Given a connection, how do we use it\n implicit def skunkQueryable[F[_]: MonadCancelThrow]: Queryable[F] = new Queryable[F] {\n def apply[A](query: AppliedFragment, decoder: Decoder[A], connection: Connection[F]): F[List[A]] =\n connection.use(_.execute(query.fragment.query(decoder))(query.argument))\n }\n}\n')),(0,o.kt)("p",null,"The dsl can be instantiated for any query algebra."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"object myDsl extends QueryDsl(MyIntegration)\n")),(0,o.kt)("p",null,"you can also add integration specific methods to your dsl."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"object myDsl extends QueryDsl(MyIntegration) {\n def someOperationSpecificToMyIntegration = ???\n}\n")),(0,o.kt)("h2",{id:"adding-arguments"},"Adding arguments"),(0,o.kt)("p",null,"All field combinators allow arguments to be provided naturally, regardless of where the field is in the query."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pt: Type[IO, QueryContext[PersonTable]] = ???\n\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>\n for {\n hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n p <- personTable.join(p => sql"${hp.personCol} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))\n } yield p\n }\n)\n')),(0,o.kt)("h2",{id:"sum-types"},"Sum types"),(0,o.kt)("p",null,"Sum types can naturally be declared also."),(0,o.kt)("details",null,(0,o.kt)("summary",null,"Lets set up some tables for sum types."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'connection.use{ ses =>\n val queries = List(\n sql"drop table if exists owner",\n sql"drop table if exists dog",\n sql"drop table if exists cat",\n sql"""create table owner (\n id int4 primary key\n )""",\n sql"""create table dog (\n id int4 primary key,\n owner_id int4 not null,\n name text not null,\n age int not null\n )""",\n sql"""create table 
cat (\n id int4 primary key,\n owner_id int4 not null,\n name text not null,\n age int not null\n )""",\n sql"""insert into owner (id) values (1)""",\n sql"""insert into owner (id) values (2)""",\n sql"""insert into dog (id, owner_id, name, age) values (1, 1, \'Dog\', 42)""",\n sql"""insert into cat (id, owner_id, name, age) values (2, 2, \'Cat\', 22)""",\n )\n\n queries.traverse(x => ses.execute(x.command))\n}.unsafeRunSync()\n// res7: List[..skunk.data.Completion] = List(\n// DropTable,\n// DropTable,\n// DropTable,\n// CreateTable,\n// CreateTable,\n// CreateTable,\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1)\n// )\n'))),(0,o.kt)("p",null,"And now we can run it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'sealed trait Animal { \n def name: String\n}\ncase class Dog(owner: String, name: String, age: Int) extends Animal\ncase class Cat(owner: String, name: String, age: Int) extends Animal\n\ntrait OwnerTable extends SkunkTable {\n def table = void"owner"\n val (idCol, id) = sel("id", int4)\n def tableKey = id\n}\ncase class OwnerTableUnion(alias: String) extends OwnerTable\ncase class OwnerTableInterface(alias: String) extends OwnerTable\nval ownerTableUnion = skunkTable(OwnerTableUnion)\n// ownerTableUnion: SkunkTableAlg[OwnerTableUnion] = gql.relational.skunk.dsl$$anon$2@653edaee\nval ownerTableInterface = skunkTable(OwnerTableInterface)\n// ownerTableInterface: SkunkTableAlg[OwnerTableInterface] = gql.relational.skunk.dsl$$anon$2@48d5661a\n\ncase class DogTable(alias: String) extends SkunkTable {\n def table = void"dog"\n\n val (idCol, id) = sel("id", int4)\n val (ownerCol, owner) = sel("owner_id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\nval dogTable = skunkTable(DogTable)\n// dogTable: SkunkTableAlg[DogTable] = gql.relational.skunk.dsl$$anon$2@51239f12\n\ncase class CatTable(alias: String) extends SkunkTable {\n def table = void"cat"\n\n val (idCol, id) = sel("id", int4)\n val (ownerCol, owner) = sel("owner_id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\nval catTable = skunkTable(CatTable)\n// catTable: SkunkTableAlg[CatTable] = gql.relational.skunk.dsl$$anon$2@2c40d41e\n\nimplicit lazy val animalInterface = interface[IO, QueryContext[OwnerTableInterface]](\n "AnimalInterface",\n "owner" -> abst[IO, String]\n)\n\nimplicit lazy val cat = tpe[IO, QueryContext[CatTable]](\n "Cat",\n "owner" -> query(_.owner),\n "name" -> query(_.name),\n "age" -> query(_.age)\n).contImplements[OwnerTableInterface]{ owner => \n catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}")\n}\n\nimplicit lazy val dog = tpe[IO, QueryContext[DogTable]](\n "Dog",\n "owner" -> query(_.owner),\n "name" -> query(_.name),\n "age" -> query(_.age)\n).contImplements[OwnerTableInterface]{ owner => \n dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}")\n}\n\n// we use the builder to create a union type\nimplicit lazy val animal = relBuilder[IO, OwnerTableUnion] { b =>\n b\n .union("Animal")\n .contVariant(owner => dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}"))\n .contVariant(owner => catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}"))\n}\n\ndef schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "animals" -> runFieldSingle(connection) { (_: Unit) =>\n ownerTableUnion.join[List](_ => sql"true")\n },\n "animalInterfaces" -> 
runFieldSingle(connection) { (_: Unit) =>\n ownerTableInterface.join[List](_ => sql"true")\n }\n )\n)\n\ndef animalQuery = """\n query {\n animals {\n __typename\n ... on Dog {\n owner\n name\n age\n }\n ... on Cat {\n owner\n name\n age\n }\n }\n animalInterfaces {\n __typename\n ... on Dog {\n owner\n name\n age\n }\n ... on Cat {\n owner\n name\n age\n }\n }\n }\n"""\n\nschema\n .map(Compiler[IO].compile(_, animalQuery))\n .flatMap { case Right(Application.Query(run)) => run.map(_.handleErrors{e => println(e.getMessage()); ""}.asJson.spaces2) }\n .unsafeRunSync()\n// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id\n// from owner as t1\n// left join dog as t2 on t1.id = t2.owner_id\n// left join cat as t3 on t1.id = t3.owner_id\n// where true\n// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id\n// from owner as t1\n// left join dog as t2 on t1.id = t2.owner_id\n// left join cat as t3 on t1.id = t3.owner_id\n// where true\n// res8: String = """{\n// "data" : {\n// "animalInterfaces" : [\n// {\n// "__typename" : "Cat",\n// "age" : 22,\n// "name" : "Cat",\n// "owner" : 2\n// },\n// {\n// "__typename" : "Dog",\n// "age" : 42,\n// "name" : "Dog",\n// "owner" : 1\n// }\n// ],\n// "animals" : [\n// {\n// "__typename" : "Cat",\n// "age" : 22,\n// "name" : "Cat",\n// "owner" : 2\n// },\n// {\n// "__typename" : "Dog",\n// "age" : 42,\n// "name" : "Dog",\n// "owner" : 1\n// }\n// ]\n// }\n// }"""\n')),(0,o.kt)("h2",{id:"declaring-complex-subqueries"},"Declaring complex subqueries"),(0,o.kt)("p",null,"Sometimes your tables must have complex filtering, limiting, ordering and so on.\nThe most obvious way to declare such parameters is simply to use a subquery."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class ParameterizedPersonTable(alias: String, table: AppliedFragment) extends SkunkTable {\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n \n def tableKey = id\n}\ndef parameterizedPersonTable(\n limitOffset: Option[(Int, Int)],\n order: Option[AppliedFragment],\n filter: Option[AppliedFragment]\n) = skunkTable{ alias => \n val filt = filter.foldMap(f => sql"where ${f.fragment}".apply(f.argument))\n val ord = order.foldMap(f => sql"order by ${f.fragment}".apply(f.argument))\n val lim = \n limitOffset.foldMap{ case (limit, offset) => sql"limit ${int4} offset ${int4}".apply((limit, offset))}\n ParameterizedPersonTable(\n alias,\n sql"""|(\n | select *\n | from person\n | ${filt.fragment}\n | ${ord.fragment}\n | ${lim.fragment}\n |)""".stripMargin.apply((filt.argument, ord.argument, lim.argument))\n )\n}\n')),(0,o.kt)("p",null,"And now we can use our new table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val ppt: Type[IO, QueryContext[ParameterizedPersonTable]] = ???\n\nval personQueryArgs = (\n arg[Option[Int]]("limit"),\n arg[Option[Int]]("offset"),\n arg[Option[Boolean]]("order"),\n arg[Option[Int]]("ageFilter")\n).tupled\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "people" -> cont(personQueryArgs) { case (home, (lim, off, ord, af)) =>\n for {\n hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n p <- parameterizedPersonTable(\n limitOffset = (lim, off).tupled,\n order = ord.map{\n case true => void"age desc"\n case false => void"age asc"\n },\n filter = af.map(age => sql"age > ${int4}".apply(age))\n ).join(p => 
sql"${hp.personCol} = ${p.idCol}")\n } yield p\n }\n)\n')),(0,o.kt)("h2",{id:"using-relational-without-tables"},"Using relational without tables"),(0,o.kt)("p",null,"There is no restriction on how you can implement a table, so you can choose your own strategy.\nFor instance say we just wanted to declare everything up-front and select fields ad-hoc."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.relational.skunk.SkunkIntegration.Query.Select\n\ncase class AdHocTable(\n alias: String, \n table: AppliedFragment,\n tableKey: Select[?],\n) extends SkunkTable\n\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>\n for {\n hp <- skunkTable(alias => \n AdHocTable(\n alias, \n sql"#${alias}.home_person".apply(Void), \n select(\n int4 ~ int4,\n sql"#${alias}.home_id".apply(Void), \n sql"#${alias}.person_id".apply(Void)\n )\n )\n ).join[List](hp => sql"${home.idCol} = ${hp.aliased(sql"home_id")}")\n p <- personTable.join(p => sql"${hp.aliased(sql".person_id")} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))\n } yield p\n }\n)\n')),(0,o.kt)("p",null,"Since there is no dsl for this, constructing the query is a bit gruesome.\nConsider if a dsl is possible for your formulation."),(0,o.kt)("h2",{id:"running-transactions"},"Running transactions"),(0,o.kt)("p",null,"Most usecases involve running all queries in a transaction, but none of the examples so far have introduces this.\nThe implementation of transactions depends on the database library, but many implementations share common properties."),(0,o.kt)("p",null,"If your database library supports opening transactions as a resource then the you can lazily open a transaction.\nHere is an example using skunk."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'trait SessionContext {\n def getSession: Resource[IO, Session[IO]]\n}\n\nobject SessionContext {\n def fromIOLocal(iol: IOLocal[Option[Resource[IO, Session[IO]]]]) = new SessionContext {\n def getSession = Resource.eval(iol.get).flatMap{\n case None => Resource.eval(IO.raiseError(new Exception("No session in context")))\n case Some(sc) => sc\n }\n }\n}\n\ndef myConnection: Resource[IO, Session[IO]] = Session.single[IO](\n host = "127.0.0.1",\n port = 5432,\n user = "postgres",\n database = "postgres"\n)\n\n// The outer resource manages the lifecycle of the connection\n// The inner resource leases the connection, if the inner resource is not closed, the outer waits\ndef lazyConnection: Resource[IO, LazyResource[IO, Session[IO]]] = \n gql.relational.LazyResource.fromResource(myConnection)\n\n// We define our schema as requiring a connection\ndef myQuery(ctx: SessionContext): Type[IO, Unit] = {\n implicit lazy val homeTableTpe: Out[IO, QueryContext[HomeTable]] = ???\n tpe[IO, Unit](\n "Query",\n "homes" -> runFieldSingle(ctx.getSession) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n}\n\ndef runQuery: IO[String => Compiler.Outcome[IO]] = \n gql.Statistics[IO].flatMap{ stats => \n IOLocal[Option[Resource[IO, Session[IO]]]](None).map{ loc =>\n val sc = SessionContext.fromIOLocal(loc)\n\n val schema = gql.Schema.query(stats)(myQuery(sc))\n\n val setResource = lazyConnection.evalMap(x => loc.set(Some(x.get)))\n\n (query: String) => \n Compiler[IO]\n .compile(schema, q)\n .map{\n case gql.Application.Query(fa) => gql.Application.Query(setResource.surround(fa))\n case gql.Application.Mutation(fa) => 
gql.Application.Mutation(setResource.surround(fa))\n // Subscription is a bit more complex since we would like to close the transaction on every event\n case gql.Application.Subscription(fa) => \n gql.Application.Subscription{\n fs2.Stream.resource(lazyConnection).flatMap{ x =>\n fs2.Stream.exec(loc.set(Some(x.get))) ++\n fa.evalTap(_ => x.forceClose)\n }\n }\n }\n }\n }\n')),(0,o.kt)("details",null,(0,o.kt)("summary",null,"You can also use MTL for passing the transaction around"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.mtl._\n\ndef myConnection: Resource[IO, Session[IO]] = Session.single[IO](\n host = "127.0.0.1",\n port = 5432,\n user = "postgres",\n database = "postgres"\n)\n\n// The outer resource manages the lifecycle of the connection\n// The inner resource leases the connection, if the inner resource is not closed, the outer waits\ndef lazyConnection: Resource[IO, LazyResource[IO, Session[IO]]] = \n gql.relational.LazyResource.fromResource(myConnection)\n\nval liftK = Kleisli.liftK[IO, Resource[IO, Session[IO]]]\n\ntype GetConn[F[_]] = Ask[F, Resource[F, Session[F]]]\n\ndef makeConn[F[_]](conn: GetConn[F]): Resource[F, Session[F]] = \n Resource.eval(conn.ask[Resource[F, Session[F]]]).flatten\n\n// We define our schema as requiring a connection\ndef myQuery[F[_]: Async](conn: GetConn[F]): Type[F, Unit] = {\n implicit lazy val homeTableTpe: Type[F, QueryContext[HomeTable]] = ???\n tpe[F, Unit](\n "Query",\n "homes" -> runFieldSingle(makeConn(conn)) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n}\n\nimplicit def functorForAsk[F[_]]: Functor[Ask[F, *]] = ???\ndef kleisliAsk[F[_]: Applicative, A] = Ask[Kleisli[F, A, *], A]\n\ndef runQuery: IO[String => Compiler.Outcome[IO]] = \n gql.Statistics[IO].map{ stats => \n type G[A] = Kleisli[IO, Resource[IO, Session[IO]], A]\n\n val liftK = Kleisli.liftK[IO, Resource[IO, Session[IO]]]\n\n val ask: Ask[G, Resource[G, Session[G]]] = \n kleisliAsk[IO, Resource[IO, Session[IO]]].map(_.mapK(liftK).map(_.mapK(liftK)))\n\n val schema = gql.Schema.query(stats.mapK(liftK))(myQuery[G](ask))\n\n val oneshot = lazyConnection.map(_.get.flatTap(_.transaction))\n\n (query: String) => \n Compiler[G]\n .compile(schema, q)\n .map{ \n case gql.Application.Query(fa) => gql.Application.Query(oneshot.useKleisli(fa))\n case gql.Application.Mutation(fa) => gql.Application.Mutation(oneshot.useKleisli(fa))\n // Subscription is a bit more complex since we would like to close the transaction on every event\n case gql.Application.Subscription(fa) => \n gql.Application.Subscription{\n fs2.Stream.resource(lazyConnection).flatMap{ lc =>\n fa\n .translate(Kleisli.applyK[IO, Resource[IO, Session[IO]]](lc.get.flatTap(_.transaction)))\n .evalTap(_ => lc.forceClose)\n }\n }\n }\n }\n'))),(0,o.kt)("h2",{id:"handling-n1"},"Handling N+1"),(0,o.kt)("p",null,"The relational module can handle N+1 queries and queries that can cause cartesian products.\nTo solve N+1, the user must use the ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," method instead of the ",(0,o.kt)("inlineCode",{parentName:"p"},"runFieldSingle"),".\nThe ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," method takes a list of inputs ",(0,o.kt)("inlineCode",{parentName:"p"},"I")," and produces ",(0,o.kt)("inlineCode",{parentName:"p"},"Query[G, (Select[I], B)]"),", such that query results can be reassociated with the inputs."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def myBatchedHomeQuery(conn: 
Resource[IO, Session[IO]]) = {\n case class MyDatatype(homeId: Int)\n\n tpe[IO, MyDatatype](\n "MyDatatype",\n "home" -> runField[IO, List, MyDatatype, HomeTable](conn) { xs => \n val lst = xs.toList.map(_.homeId)\n for {\n ht <- homeTable.join[List](ht => sql"${ht.idCol} in (${int4.list(lst)})".apply(lst))\n } yield (ht.id.fmap(MyDatatype), ht)\n }\n )\n}\n')),(0,o.kt)("p",null,"To solve the query multiplicity explosions you can use the ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary")," which works almost like ",(0,o.kt)("inlineCode",{parentName:"p"},"cont"),", except the query will be split up into two queries."),(0,o.kt)("p",null,"The ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary")," function takes two interesting parameters.\nThe first parameter will be a projection of the current query, decoded into ",(0,o.kt)("inlineCode",{parentName:"p"},"B"),".\nThe second parameter turns this ",(0,o.kt)("inlineCode",{parentName:"p"},"B")," into another query, which will be the root of the new query."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def boundaryQuery(conn: Resource[IO, Session[IO]]) = {\n case class MyDatatype(homeId: Int)\n\n relBuilder[IO, HomeTable]{ rb =>\n rb.tpe(\n "HomeTable",\n "people" -> rb.contBoundary(conn){ home =>\n homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}").map(_.person)\n }{ (xs: NonEmptyList[Int]) =>\n val lst = xs.toList\n personTable.join(p => sql"${p.idCol} in (${int4.list(lst)})".apply(lst)).map(p => p.id -> p)\n }\n )\n }\n}\n')),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary")," is only available in when using the ",(0,o.kt)("inlineCode",{parentName:"p"},"relBuilder"),", since type inference does not work very well."),(0,o.kt)("p",{parentName:"admonition"},"Inference troubles with ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," can also be alleviated by using the ",(0,o.kt)("inlineCode",{parentName:"p"},"relBuilder"),".")))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[508],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>m});var t=a(7294);function o(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function l(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function i(e){for(var n=1;n=0||(o[a]=e[a]);return o}(e,n);if(Object.getOwnPropertySymbols){var l=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(o[a]=e[a])}return o}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):i(i({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},d=t.forwardRef((function(e,n){var a=e.components,o=e.mdxType,l=e.originalType,s=e.parentName,c=r(e,["components","mdxType","originalType","parentName"]),d=p(a),m=o,g=d["".concat(s,".").concat(m)]||d[m]||u[m]||l;return a?t.createElement(g,i(i({ref:n},c),{},{components:a})):t.createElement(g,i({ref:n},c))}));function m(e,n){var a=arguments,o=n&&n.mdxType;if("string"==typeof e||o){var l=a.length,i=new Array(l);i[0]=d;var 
r={};for(var s in n)hasOwnProperty.call(n,s)&&(r[s]=n[s]);r.originalType=e,r.mdxType="string"==typeof e?e:o,i[1]=r;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>u,frontMatter:()=>l,metadata:()=>r,toc:()=>p});var t=a(7462),o=(a(7294),a(3905));const l={title:"Relational"},i=void 0,r={unversionedId:"server/integrations/relational",id:"server/integrations/relational",title:"Relational",description:"This integration is fairly new and sophisticated, so it may be subject to change.",source:"@site/docs/server/integrations/relational.md",sourceDirName:"server/integrations",slug:"/server/integrations/relational",permalink:"/gql/docs/server/integrations/relational",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/integrations/relational.md",tags:[],version:"current",frontMatter:{title:"Relational"},sidebar:"docs",previous:{title:"Global object identification",permalink:"/gql/docs/server/integrations/goi"},next:{title:"Query DSL",permalink:"/gql/docs/client/dsl"}},s={},p=[{value:"Skunk example",id:"skunk-example",level:2},{value:"Simplifying relationships",id:"simplifying-relationships",level:3},{value:"Runtime semantics",id:"runtime-semantics",level:2},{value:"Implementing your own integration",id:"implementing-your-own-integration",level:2},{value:"Adding arguments",id:"adding-arguments",level:2},{value:"Sum types",id:"sum-types",level:2},{value:"Declaring complex subqueries",id:"declaring-complex-subqueries",level:2},{value:"Using relational without tables",id:"using-relational-without-tables",level:2},{value:"Running transactions",id:"running-transactions",level:2},{value:"Handling N+1",id:"handling-n1",level:2}],c={toc:p};function u(e){let{components:n,...a}=e;return(0,o.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,o.kt)("admonition",{type:"caution"},(0,o.kt)("p",{parentName:"admonition"},"This integration is fairly new and sophisticated, so it may be subject to change.")),(0,o.kt)("p",null,"gql also comes with an optional integration for relational databases."),(0,o.kt)("p",null,"The relational integration is library agnostic and is based on query fragments that can be composed into a full query."),(0,o.kt)("p",null,"The relational module ships with two implementations, one for ",(0,o.kt)("inlineCode",{parentName:"p"},"skunk")," and another for ",(0,o.kt)("inlineCode",{parentName:"p"},"doobie"),".\nThey can be found in the ",(0,o.kt)("a",{parentName:"p",href:"../../overview/modules"},"modules")," section."),(0,o.kt)("admonition",{type:"tip"},(0,o.kt)("p",{parentName:"admonition"},"Integrating a new library requires very little code.\nThe skunk integration only spans 18 lines of code.")),(0,o.kt)("h2",{id:"skunk-example"},"Skunk example"),(0,o.kt)("p",null,"For this example we will use ",(0,o.kt)("inlineCode",{parentName:"p"},"skunk"),".\nWe will start off with some imports."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"import skunk._\nimport skunk.codec.all._\nimport skunk.implicits._\nimport gql.ast._\nimport gql.dsl.all._\nimport gql.relational._\nimport gql.relational.skunk.dsl._\nimport gql.relational.skunk.dsl.algebra.QueryContext\nimport cats._\nimport cats.data._\nimport cats.arrow._\nimport cats.effect._\nimport cats.implicits._\n")),(0,o.kt)("p",null,"Before we start declaring fragments, we need to define our domain."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"final case class Home(name: String, address: String)\n// many homes belong to 
many people\nfinal case class Person(name: String, age: Int)\n// a pet has one owner\nfinal case class Pet(name: String, age: Int, owner: Int)\n")),(0,o.kt)("p",null,"The relational module also ships with a dsl that makes declarations concise.\nWe will start off just declaring the home table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class HomeTable(\n  // When a table is queried it must have an alias\n  alias: String\n) extends SkunkTable {\n  // Note that we use only skunk tools to declare the contents of this structure\n\n  // We can declare how this table is referenced in sql (or some other query language)\n  def table = void"home"\n\n  // The SkunkTable trait gives some convenience methods for declaring columns\n  val (idCol, id) = sel("id", int4)\n  val (nameCol, name) = sel("name", text)\n  val (addressCol, address) = sel("address", text)\n\n  // The projection that uniquely identifies a row in the table\n  def tableKey = id\n}\n// We get some methods that show how, given an alias, we can get a table\nval homeTable = skunkTable(HomeTable)\n')),(0,o.kt)("p",null,"We will also need to declare the other two tables, this time with fewer comments."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class PersonTable(alias: String) extends SkunkTable {\n  def table = void"person"\n\n  val (idCol, id) = sel("id", int4)\n  val (nameCol, name) = sel("name", text)\n  val (ageCol, age) = sel("age", int4)\n\n  def tableKey = id\n}\nval personTable = skunkTable(PersonTable)\n\ncase class PetTable(alias: String) extends SkunkTable {\n  def table = void"pet"\n\n  val (idCol, id) = sel("id", int4)\n  val (nameCol, name) = sel("name", text)\n  val (ageCol, age) = sel("age", int4)\n  val (ownerCol, owner) = sel("owner", int4)\n\n  def tableKey = id\n}\nval petTable = skunkTable(PetTable)\n')),(0,o.kt)("p",null,"Since ",(0,o.kt)("inlineCode",{parentName:"p"},"Home")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"Person")," have a many-to-many relationship, we will have to go through another table to get the relationship."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class HomePersonTable(alias: String) extends SkunkTable {\n  def table = void"home_person"\n\n  val (homeCol, home) = sel("home_id", int4)\n  val (personCol, person) = sel("person_id", int4)\n\n  def tableKey = (home, person).tupled\n}\nval homePersonTable = skunkTable(HomePersonTable)\n')),(0,o.kt)("p",null,"Now we can start declaring our graphql schema."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pet: Type[IO, QueryContext[PetTable]] = \n  tpe[IO, QueryContext[PetTable]](\n    "PetTable",\n    "name" -> query(_.name), // query is a method that compiles to a projection in the query language (sql)\n    "age" -> query(_.age)\n  )\n\nimplicit lazy val person: Type[IO, QueryContext[PersonTable]] = \n  tpe[IO, QueryContext[PersonTable]](\n    "PersonTable",\n    "name" -> query(_.name),\n    "age" -> query(_.age),\n    "pets" -> cont{ person => // cont is a continuation that will create a new table from the current one\n      // The join method takes a type parameter that declares the multiplicity of the join\n      // If no type parameter is given, the join is assumed to be one to one\n      petTable.join[List]{ pet =>\n        // Given an instance of the pet table, we can declare a join predicate\n        sql"${pet.ownerCol} = ${person.idCol}"\n      }\n    }\n  )\n\nimplicit lazy val home: Type[IO, QueryContext[HomeTable]] = \n  tpe[IO, 
QueryContext[HomeTable]](\n "HomeTable",\n "name" -> query(_.name),\n "address" -> query(_.address),\n "caption" -> query(h => (h.name, h.address).mapN(_ + " at " + _)), // projections form an applicative\n "people" -> cont{ home =>\n // Tables can be flatmapped together\n for {\n hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n p <- personTable.join(p => sql"${hp.personCol} = ${p.idCol}")\n } yield p\n }\n )\n')),(0,o.kt)("p",null,"Now we are done declaring our schema."),(0,o.kt)("p",null,"Before querying it we will need our database up and running."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.effect.unsafe.implicits.global\nimport natchez.noop._ // needed for skunk connection\nimplicit val trace: natchez.Trace[IO] = NoopTrace[IO]()\n\ndef connection = Session.single[IO](\n host = "127.0.0.1",\n port = 5432,\n user = "postgres",\n database = "postgres"\n)\n')),(0,o.kt)("details",null,(0,o.kt)("summary",null,"We will also need to create our tables and insert some data."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'connection.use{ ses =>\n val queries = List(\n sql"drop table if exists pet",\n sql"drop table if exists home_person",\n sql"drop table if exists person",\n sql"drop table if exists home",\n sql"""create table home_person (\n home_id int not null,\n person_id int not null\n )""",\n sql"""create table pet (\n id int4 primary key,\n name text not null,\n age int not null,\n owner int not null\n )""",\n sql"""create table person (\n id int4 primary key,\n name text not null,\n age int not null\n )""",\n sql"""create table home (\n id int4 primary key,\n name text not null,\n address text not null\n )""",\n sql"""insert into home (id, name, address) values (1, \'Doe Home\', \'123 Main St\')""",\n sql"""insert into person (id, name, age) values (1, \'John Doe\', 42)""",\n sql"""insert into person (id, name, age) values (2, \'Jane Doe\', 40)""",\n sql"""insert into home_person (home_id, person_id) values (1, 1)""", \n sql"""insert into home_person (home_id, person_id) values (1, 2)""",\n sql"""insert into pet (id, name, age, owner) values (1, \'Fluffy\', 2, 1)""",\n )\n\n queries.traverse(x => ses.execute(x.command))\n}.unsafeRunSync()\n// res0: List[..skunk.data.Completion] = List(\n// DropTable,\n// DropTable,\n// DropTable,\n// DropTable,\n// CreateTable,\n// CreateTable,\n// CreateTable,\n// CreateTable,\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1),\n// Insert(count = 1)\n// )\n'))),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "homes" -> runFieldSingle(connection) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n)\n\ndef q = """\nquery {\n homes {\n name\n address\n caption\n people {\n name\n age\n pets {\n name\n age\n }\n }\n }\n}\n"""\n\nimport io.circe.syntax._\nimport gql.{Compiler, Application}\nschema\n .map(Compiler[IO].compile(_, q))\n .flatMap { case Right(Application.Query(run)) => run.map(_.handleErrors{e => println(e.getMessage()); ""}.asJson.spaces2) }\n .unsafeRunSync()\n// res1: String = """{\n// "data" : {\n// "homes" : [\n// {\n// "address" : "123 Main St",\n// "caption" : "Doe Home at 123 Main St",\n// "name" : "Doe Home",\n// "people" : [\n// {\n// "age" : 42,\n// "name" : "John Doe",\n// "pets" : [\n// {\n// "age" : 2,\n// "name" : "Fluffy"\n// }\n// ]\n// },\n// 
{\n// "age" : 40,\n// "name" : "Jane Doe",\n// "pets" : [\n// ]\n// }\n// ]\n// }\n// ]\n// }\n// }"""\n')),(0,o.kt)("p",null,"And thats it!"),(0,o.kt)("p",null,"Just for fun, we check out the generated sql."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.relational.skunk._\nimplicit def logQueries[F[_]: MonadCancelThrow]: SkunkIntegration.Queryable[F] = \n new SkunkIntegration.Queryable[F] {\n def apply[A](\n query: AppliedFragment,\n decoder: Decoder[A], \n connection: SkunkIntegration.Connection[F]\n ): F[List[A]] = {\n println(query.fragment.sql)\n SkunkIntegration.skunkQueryable[F].apply(query, decoder, connection)\n }\n}\n\ndef schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "homes" -> runFieldSingle(connection) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n)\n\nschema\n .map(Compiler[IO].compile(_, q))\n .flatMap { case Right(Application.Query(run)) => run.void }\n .unsafeRunSync()\n// select t1.id, t1.address, t1.name, t1.address, t1.name, t2.home_id, t2.person_id, t3.id, t3.age, t3.name, t4.id, t4.age, t4.name\n// from home as t1\n// left join home_person as t2 on t1.id = t2.home_id\n// left join person as t3 on t2.person_id = t3.id\n// left join pet as t4 on t4.owner = t3.id\n// where true\n')),(0,o.kt)("h3",{id:"simplifying-relationships"},"Simplifying relationships"),(0,o.kt)("p",null,"The join between ",(0,o.kt)("inlineCode",{parentName:"p"},"home")," and ",(0,o.kt)("inlineCode",{parentName:"p"},"person")," can be a bit daunting, since you have to keep track of multiplicity yourself.\nInstead we can use the database to handle some of the multiplicity for us by generalizing the person table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class SharedPersonTable(alias: String, table: AppliedFragment) extends SkunkTable {\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\n\nval sharedPersonTable = skunkTable(SharedPersonTable(_, void"person"))\n\nval homePersonQuery = void"(select * from home_person inner join person on home_person.person_id = person.id)"\nval sharedHomePersonTable = skunkTable(SharedPersonTable(_, homePersonQuery))\n\n// And now using our subquery we can simplify the join.\nimplicit lazy val person: Type[IO, QueryContext[SharedPersonTable]] = ???\n\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "name" -> query(_.name),\n "address" -> query(_.address),\n "caption" -> query(h => (h.name, h.address).mapN(_ + " at " + _)), // projections form an applicative\n "people" -> cont{ h => \n sharedHomePersonTable.join[List](hp => sql"${h.idCol} = ${hp.aliased(sql"home_id")}")\n }\n)\n')),(0,o.kt)("h2",{id:"runtime-semantics"},"Runtime semantics"),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"This section is a technical reference, and not necessary to use the library.")),(0,o.kt)("p",null,"Data emitted by SQL is not hierarchical, but instead flat; for it to map well to graphql, which is hierarchical some work must be performed.\nMost use-cases are covered by simply invoking the ",(0,o.kt)("inlineCode",{parentName:"p"},"join")," method with the proper multiplicity parameter."),(0,o.kt)("p",null,"When your AST is inspected to build a query, a recursive AST walk composes a big reassociation function that can translate flat query results into the proper hierarchical structure.\nThis composed function also tracks the visited columns and 
their decoders."),(0,o.kt)("p",null,"The query algebra has a special operation that lets the caller modify the state however they wish.\nThe dsl uses this state modification for various tasks, such as providing a convenient ",(0,o.kt)("inlineCode",{parentName:"p"},"join")," method that both joins a table and performs the proper reassociation of results.\nConsider the following example that joins a table more explicitly."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"val q1 = for {\n  ht <- homeTable.simpleJoin(_ => void\"true\")\n  _ <- reassociate[List](ht.tableKey)\n  // some other reassociation criteria\n  _ <- reassociate[Option](select(int4, void\"42\"))\n} yield ht\n// q1: algebra.Query[[X]List[Option[X]], HomeTable] = FlatMap(\n//   fa = FlatMap(\n//     fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@625f3240)),\n//     f = gql.relational.QueryDsl$$Lambda$13935/0x00000008039a7040@2ccab511\n//   ),\n//   f = \n// )\n\n// we can perform reassociation before performing the actions in 'q1'\nval q2 = reassociate[Option](select(text, void\"'john doe'\")).flatMap(_ => q1)\n// q2: algebra.Query[[X]Option[List[Option[X]]], HomeTable] = FlatMap(\n//   fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@612fb68)),\n//   f = \n// )\n\n// we can also change the result structure after performing the actions in 'q2'\nq2.mapK[List](new (\u03bb[X => Option[List[Option[X]]]] ~> List) {\n  def apply[A](fa: Option[List[Option[A]]]): List[A] = fa.toList.flatten.flatMap(_.toList)\n})\n// res4: algebra.Query[List, HomeTable] = LiftEffect(\n//   fa = EitherT(value = cats.data.IndexedStateT@6e7c90ca)\n// )\n")),(0,o.kt)("p",null,"Accessing the low-level state also lets the user perform other tasks, such as unique id (new alias) generation."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"for {\n  alias1 <- newAlias\n  alias2 <- newAlias\n} yield ()\n// res5: algebra.Query[[X]X, Unit] = FlatMap(\n//   fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@752a7277)),\n//   f = \n// )\n")),(0,o.kt)("h2",{id:"implementing-your-own-integration"},"Implementing your own integration"),(0,o.kt)("p",null,"The entire dsl and query compiler is available if you implement a couple of methods."),(0,o.kt)("p",null,"Here is the full skunk integration."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import _root_.{skunk => sk}\nobject MyIntegration extends QueryAlgebra {\n  // What is a fragment\n  type Frag = sk.AppliedFragment\n  // How do we transform a string into a fragment\n  def stringToFrag(s: String): Frag = sql"#${s}".apply(Void)\n  // Combine and create empty fragments\n  implicit def appliedFragmentMonoid: Monoid[Frag] = sk.AppliedFragment.MonoidAppFragment\n  // How do we decode values\n  type Decoder[A] = sk.Decoder[A]\n  // How can we combine decoders\n  implicit def applicativeForDecoder: Applicative[Decoder] = Decoder.ApplicativeDecoder\n  // How do we make an optional decoder\n  def optDecoder[A](d: Decoder[A]): Decoder[Option[A]] = d.opt\n  // What is needed to perform a query\n  type Connection[F[_]] = Resource[F, Session[F]]\n  // Given a connection, how do we use it\n  implicit def skunkQueryable[F[_]: MonadCancelThrow]: Queryable[F] = new Queryable[F] {\n    def apply[A](query: AppliedFragment, decoder: Decoder[A], connection: Connection[F]): F[List[A]] =\n      connection.use(_.execute(query.fragment.query(decoder))(query.argument))\n  }\n}\n')),(0,o.kt)("p",null,"The dsl can be instantiated for any query 
algebra."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"object myDsl extends QueryDsl(MyIntegration)\n")),(0,o.kt)("p",null,"You can also add integration-specific methods to your dsl."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},"object myDsl extends QueryDsl(MyIntegration) {\n  def someOperationSpecificToMyIntegration = ???\n}\n")),(0,o.kt)("h2",{id:"adding-arguments"},"Adding arguments"),(0,o.kt)("p",null,"All field combinators allow arguments to be provided naturally, regardless of where the field is in the query."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val pt: Type[IO, QueryContext[PersonTable]] = ???\n\ntpe[IO, QueryContext[HomeTable]](\n  "HomeTable",\n  "people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>\n    for {\n      hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n      p <- personTable.join(p => sql"${hp.personCol} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))\n    } yield p\n  }\n)\n')),(0,o.kt)("h2",{id:"sum-types"},"Sum types"),(0,o.kt)("p",null,"Sum types can also be declared naturally."),(0,o.kt)("details",null,(0,o.kt)("summary",null,"Let's set up some tables for sum types."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'connection.use{ ses =>\n  val queries = List(\n    sql"drop table if exists owner",\n    sql"drop table if exists dog",\n    sql"drop table if exists cat",\n    sql"""create table owner (\n      id int4 primary key\n    )""",\n    sql"""create table dog (\n      id int4 primary key,\n      owner_id int4 not null,\n      name text not null,\n      age int not null\n    )""",\n    sql"""create table cat (\n      id int4 primary key,\n      owner_id int4 not null,\n      name text not null,\n      age int not null\n    )""",\n    sql"""insert into owner (id) values (1)""",\n    sql"""insert into owner (id) values (2)""",\n    sql"""insert into dog (id, owner_id, name, age) values (1, 1, \'Dog\', 42)""",\n    sql"""insert into cat (id, owner_id, name, age) values (2, 2, \'Cat\', 22)""",\n  )\n\n  queries.traverse(x => ses.execute(x.command))\n}.unsafeRunSync()\n// res7: List[..skunk.data.Completion] = List(\n//   DropTable,\n//   DropTable,\n//   DropTable,\n//   CreateTable,\n//   CreateTable,\n//   CreateTable,\n//   Insert(count = 1),\n//   Insert(count = 1),\n//   Insert(count = 1),\n//   Insert(count = 1)\n// )\n'))),(0,o.kt)("p",null,"And now we can run it."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'sealed trait Animal { \n  def name: String\n}\ncase class Dog(owner: String, name: String, age: Int) extends Animal\ncase class Cat(owner: String, name: String, age: Int) extends Animal\n\ntrait OwnerTable extends SkunkTable {\n  def table = void"owner"\n  val (idCol, id) = sel("id", int4)\n  def tableKey = id\n}\ncase class OwnerTableUnion(alias: String) extends OwnerTable\ncase class OwnerTableInterface(alias: String) extends OwnerTable\nval ownerTableUnion = skunkTable(OwnerTableUnion)\n// ownerTableUnion: SkunkTableAlg[OwnerTableUnion] = gql.relational.skunk.dsl$$anon$2@cb123fe\nval ownerTableInterface = skunkTable(OwnerTableInterface)\n// ownerTableInterface: SkunkTableAlg[OwnerTableInterface] = gql.relational.skunk.dsl$$anon$2@1de940bb\n\ncase class DogTable(alias: String) extends SkunkTable {\n  def table = void"dog"\n\n  val (idCol, id) = sel("id", int4)\n  val (ownerCol, owner) = sel("owner_id", int4)\n  val (nameCol, name) = sel("name", text)\n  val (ageCol, age) = sel("age", int4)\n\n  def tableKey = id\n}\nval dogTable 
= skunkTable(DogTable)\n// dogTable: SkunkTableAlg[DogTable] = gql.relational.skunk.dsl$$anon$2@1719790c\n\ncase class CatTable(alias: String) extends SkunkTable {\n def table = void"cat"\n\n val (idCol, id) = sel("id", int4)\n val (ownerCol, owner) = sel("owner_id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n\n def tableKey = id\n}\nval catTable = skunkTable(CatTable)\n// catTable: SkunkTableAlg[CatTable] = gql.relational.skunk.dsl$$anon$2@31860852\n\nimplicit lazy val animalInterface = interface[IO, QueryContext[OwnerTableInterface]](\n "AnimalInterface",\n "owner" -> abst[IO, String]\n)\n\nimplicit lazy val cat = tpe[IO, QueryContext[CatTable]](\n "Cat",\n "owner" -> query(_.owner),\n "name" -> query(_.name),\n "age" -> query(_.age)\n).contImplements[OwnerTableInterface]{ owner => \n catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}")\n}\n\nimplicit lazy val dog = tpe[IO, QueryContext[DogTable]](\n "Dog",\n "owner" -> query(_.owner),\n "name" -> query(_.name),\n "age" -> query(_.age)\n).contImplements[OwnerTableInterface]{ owner => \n dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}")\n}\n\n// we use the builder to create a union type\nimplicit lazy val animal = relBuilder[IO, OwnerTableUnion] { b =>\n b\n .union("Animal")\n .contVariant(owner => dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}"))\n .contVariant(owner => catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}"))\n}\n\ndef schema = gql.Schema.query(\n tpe[IO, Unit](\n "Query",\n "animals" -> runFieldSingle(connection) { (_: Unit) =>\n ownerTableUnion.join[List](_ => sql"true")\n },\n "animalInterfaces" -> runFieldSingle(connection) { (_: Unit) =>\n ownerTableInterface.join[List](_ => sql"true")\n }\n )\n)\n\ndef animalQuery = """\n query {\n animals {\n __typename\n ... on Dog {\n owner\n name\n age\n }\n ... on Cat {\n owner\n name\n age\n }\n }\n animalInterfaces {\n __typename\n ... on Dog {\n owner\n name\n age\n }\n ... 
on Cat {\n owner\n name\n age\n }\n }\n }\n"""\n\nschema\n .map(Compiler[IO].compile(_, animalQuery))\n .flatMap { case Right(Application.Query(run)) => run.map(_.handleErrors{e => println(e.getMessage()); ""}.asJson.spaces2) }\n .unsafeRunSync()\n// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id\n// from owner as t1\n// left join dog as t2 on t1.id = t2.owner_id\n// left join cat as t3 on t1.id = t3.owner_id\n// where true\n// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id\n// from owner as t1\n// left join dog as t2 on t1.id = t2.owner_id\n// left join cat as t3 on t1.id = t3.owner_id\n// where true\n// res8: String = """{\n// "data" : {\n// "animalInterfaces" : [\n// {\n// "__typename" : "Cat",\n// "age" : 22,\n// "name" : "Cat",\n// "owner" : 2\n// },\n// {\n// "__typename" : "Dog",\n// "age" : 42,\n// "name" : "Dog",\n// "owner" : 1\n// }\n// ],\n// "animals" : [\n// {\n// "__typename" : "Cat",\n// "age" : 22,\n// "name" : "Cat",\n// "owner" : 2\n// },\n// {\n// "__typename" : "Dog",\n// "age" : 42,\n// "name" : "Dog",\n// "owner" : 1\n// }\n// ]\n// }\n// }"""\n')),(0,o.kt)("h2",{id:"declaring-complex-subqueries"},"Declaring complex subqueries"),(0,o.kt)("p",null,"Sometimes your tables must have complex filtering, limiting, ordering and so on.\nThe most obvious way to declare such parameters is simply to use a subquery."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'case class ParameterizedPersonTable(alias: String, table: AppliedFragment) extends SkunkTable {\n val (idCol, id) = sel("id", int4)\n val (nameCol, name) = sel("name", text)\n val (ageCol, age) = sel("age", int4)\n \n def tableKey = id\n}\ndef parameterizedPersonTable(\n limitOffset: Option[(Int, Int)],\n order: Option[AppliedFragment],\n filter: Option[AppliedFragment]\n) = skunkTable{ alias => \n val filt = filter.foldMap(f => sql"where ${f.fragment}".apply(f.argument))\n val ord = order.foldMap(f => sql"order by ${f.fragment}".apply(f.argument))\n val lim = \n limitOffset.foldMap{ case (limit, offset) => sql"limit ${int4} offset ${int4}".apply((limit, offset))}\n ParameterizedPersonTable(\n alias,\n sql"""|(\n | select *\n | from person\n | ${filt.fragment}\n | ${ord.fragment}\n | ${lim.fragment}\n |)""".stripMargin.apply((filt.argument, ord.argument, lim.argument))\n )\n}\n')),(0,o.kt)("p",null,"And now we can use our new table."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'implicit lazy val ppt: Type[IO, QueryContext[ParameterizedPersonTable]] = ???\n\nval personQueryArgs = (\n arg[Option[Int]]("limit"),\n arg[Option[Int]]("offset"),\n arg[Option[Boolean]]("order"),\n arg[Option[Int]]("ageFilter")\n).tupled\ntpe[IO, QueryContext[HomeTable]](\n "HomeTable",\n "people" -> cont(personQueryArgs) { case (home, (lim, off, ord, af)) =>\n for {\n hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")\n p <- parameterizedPersonTable(\n limitOffset = (lim, off).tupled,\n order = ord.map{\n case true => void"age desc"\n case false => void"age asc"\n },\n filter = af.map(age => sql"age > ${int4}".apply(age))\n ).join(p => sql"${hp.personCol} = ${p.idCol}")\n } yield p\n }\n)\n')),(0,o.kt)("h2",{id:"using-relational-without-tables"},"Using relational without tables"),(0,o.kt)("p",null,"There is no restriction on how you can implement a table, so you can choose your own strategy.\nFor instance say we just wanted to declare everything up-front and select 
fields ad-hoc."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.relational.skunk.SkunkIntegration.Query.Select\n\ncase class AdHocTable(\n  alias: String, \n  table: AppliedFragment,\n  tableKey: Select[?],\n) extends SkunkTable\n\ntpe[IO, QueryContext[HomeTable]](\n  "HomeTable",\n  "people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>\n    for {\n      hp <- skunkTable(alias => \n        AdHocTable(\n          alias, \n          sql"#${alias}.home_person".apply(Void), \n          select(\n            int4 ~ int4,\n            sql"#${alias}.home_id".apply(Void), \n            sql"#${alias}.person_id".apply(Void)\n          )\n        )\n      ).join[List](hp => sql"${home.idCol} = ${hp.aliased(sql"home_id")}")\n      p <- personTable.join(p => sql"${hp.aliased(sql".person_id")} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))\n    } yield p\n  }\n)\n')),(0,o.kt)("p",null,"Since there is no dsl for this, constructing the query is a bit gruesome.\nConsider whether a dsl is possible for your formulation."),(0,o.kt)("h2",{id:"running-transactions"},"Running transactions"),(0,o.kt)("p",null,"Most use cases involve running all queries in a transaction, but none of the examples so far have introduced this.\nThe implementation of transactions depends on the database library, but many implementations share common properties."),(0,o.kt)("p",null,"If your database library supports opening transactions as a resource, then you can lazily open a transaction.\nHere is an example using skunk."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'trait SessionContext {\n  def getSession: Resource[IO, Session[IO]]\n}\n\nobject SessionContext {\n  def fromIOLocal(iol: IOLocal[Option[Resource[IO, Session[IO]]]]) = new SessionContext {\n    def getSession = Resource.eval(iol.get).flatMap{\n      case None => Resource.eval(IO.raiseError(new Exception("No session in context")))\n      case Some(sc) => sc\n    }\n  }\n}\n\ndef myConnection: Resource[IO, Session[IO]] = Session.single[IO](\n  host = "127.0.0.1",\n  port = 5432,\n  user = "postgres",\n  database = "postgres"\n)\n\n// The outer resource manages the lifecycle of the connection\n// The inner resource leases the connection, if the inner resource is not closed, the outer waits\ndef lazyConnection: Resource[IO, LazyResource[IO, Session[IO]]] = \n  gql.relational.LazyResource.fromResource(myConnection)\n\n// We define our schema as requiring a connection\ndef myQuery(ctx: SessionContext): Type[IO, Unit] = {\n  implicit lazy val homeTableTpe: Out[IO, QueryContext[HomeTable]] = ???\n  tpe[IO, Unit](\n    "Query",\n    "homes" -> runFieldSingle(ctx.getSession) { (_: Unit) => \n      homeTable.join[List](_ => sql"true")\n    }\n  )\n}\n\ndef runQuery: IO[String => Compiler.Outcome[IO]] = \n  gql.Statistics[IO].flatMap{ stats => \n    IOLocal[Option[Resource[IO, Session[IO]]]](None).map{ loc =>\n      val sc = SessionContext.fromIOLocal(loc)\n\n      val schema = gql.Schema.query(stats)(myQuery(sc))\n\n      val setResource = lazyConnection.evalMap(x => loc.set(Some(x.get)))\n\n      (query: String) => \n        Compiler[IO]\n          .compile(schema, q)\n          .map{\n            case gql.Application.Query(fa) => gql.Application.Query(setResource.surround(fa))\n            case gql.Application.Mutation(fa) => gql.Application.Mutation(setResource.surround(fa))\n            // Subscription is a bit more complex since we would like to close the transaction on every event\n            case gql.Application.Subscription(fa) => \n              gql.Application.Subscription{\n                fs2.Stream.resource(lazyConnection).flatMap{ x =>\n                  fs2.Stream.exec(loc.set(Some(x.get))) ++\n                  fa.evalTap(_ => x.forceClose)\n                }\n              }\n          }\n    }\n  
}\n')),(0,o.kt)("details",null,(0,o.kt)("summary",null,"You can also use MTL for passing the transaction around"),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'import cats.mtl._\n\ndef myConnection: Resource[IO, Session[IO]] = Session.single[IO](\n host = "127.0.0.1",\n port = 5432,\n user = "postgres",\n database = "postgres"\n)\n\n// The outer resource manages the lifecycle of the connection\n// The inner resource leases the connection, if the inner resource is not closed, the outer waits\ndef lazyConnection: Resource[IO, LazyResource[IO, Session[IO]]] = \n gql.relational.LazyResource.fromResource(myConnection)\n\nval liftK = Kleisli.liftK[IO, Resource[IO, Session[IO]]]\n\ntype GetConn[F[_]] = Ask[F, Resource[F, Session[F]]]\n\ndef makeConn[F[_]](conn: GetConn[F]): Resource[F, Session[F]] = \n Resource.eval(conn.ask[Resource[F, Session[F]]]).flatten\n\n// We define our schema as requiring a connection\ndef myQuery[F[_]: Async](conn: GetConn[F]): Type[F, Unit] = {\n implicit lazy val homeTableTpe: Type[F, QueryContext[HomeTable]] = ???\n tpe[F, Unit](\n "Query",\n "homes" -> runFieldSingle(makeConn(conn)) { (_: Unit) => \n homeTable.join[List](_ => sql"true")\n }\n )\n}\n\nimplicit def functorForAsk[F[_]]: Functor[Ask[F, *]] = ???\ndef kleisliAsk[F[_]: Applicative, A] = Ask[Kleisli[F, A, *], A]\n\ndef runQuery: IO[String => Compiler.Outcome[IO]] = \n gql.Statistics[IO].map{ stats => \n type G[A] = Kleisli[IO, Resource[IO, Session[IO]], A]\n\n val liftK = Kleisli.liftK[IO, Resource[IO, Session[IO]]]\n\n val ask: Ask[G, Resource[G, Session[G]]] = \n kleisliAsk[IO, Resource[IO, Session[IO]]].map(_.mapK(liftK).map(_.mapK(liftK)))\n\n val schema = gql.Schema.query(stats.mapK(liftK))(myQuery[G](ask))\n\n val oneshot = lazyConnection.map(_.get.flatTap(_.transaction))\n\n (query: String) => \n Compiler[G]\n .compile(schema, q)\n .map{ \n case gql.Application.Query(fa) => gql.Application.Query(oneshot.useKleisli(fa))\n case gql.Application.Mutation(fa) => gql.Application.Mutation(oneshot.useKleisli(fa))\n // Subscription is a bit more complex since we would like to close the transaction on every event\n case gql.Application.Subscription(fa) => \n gql.Application.Subscription{\n fs2.Stream.resource(lazyConnection).flatMap{ lc =>\n fa\n .translate(Kleisli.applyK[IO, Resource[IO, Session[IO]]](lc.get.flatTap(_.transaction)))\n .evalTap(_ => lc.forceClose)\n }\n }\n }\n }\n'))),(0,o.kt)("h2",{id:"handling-n1"},"Handling N+1"),(0,o.kt)("p",null,"The relational module can handle N+1 queries and queries that can cause cartesian products.\nTo solve N+1, the user must use the ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," method instead of the ",(0,o.kt)("inlineCode",{parentName:"p"},"runFieldSingle"),".\nThe ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," method takes a list of inputs ",(0,o.kt)("inlineCode",{parentName:"p"},"I")," and produces ",(0,o.kt)("inlineCode",{parentName:"p"},"Query[G, (Select[I], B)]"),", such that query results can be reassociated with the inputs."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def myBatchedHomeQuery(conn: Resource[IO, Session[IO]]) = {\n case class MyDatatype(homeId: Int)\n\n tpe[IO, MyDatatype](\n "MyDatatype",\n "home" -> runField[IO, List, MyDatatype, HomeTable](conn) { xs => \n val lst = xs.toList.map(_.homeId)\n for {\n ht <- homeTable.join[List](ht => sql"${ht.idCol} in (${int4.list(lst)})".apply(lst))\n } yield (ht.id.fmap(MyDatatype), ht)\n }\n 
)\n}\n')),(0,o.kt)("p",null,"To solve query multiplicity explosions you can use ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary"),", which works almost like ",(0,o.kt)("inlineCode",{parentName:"p"},"cont"),", except the query will be split up into two queries."),(0,o.kt)("p",null,"The ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary")," function takes two interesting parameters.\nThe first parameter will be a projection of the current query, decoded into ",(0,o.kt)("inlineCode",{parentName:"p"},"B"),".\nThe second parameter turns this ",(0,o.kt)("inlineCode",{parentName:"p"},"B")," into another query, which will be the root of the new query."),(0,o.kt)("pre",null,(0,o.kt)("code",{parentName:"pre",className:"language-scala"},'def boundaryQuery(conn: Resource[IO, Session[IO]]) = {\n  case class MyDatatype(homeId: Int)\n\n  relBuilder[IO, HomeTable]{ rb =>\n    rb.tpe(\n      "HomeTable",\n      "people" -> rb.contBoundary(conn){ home =>\n        homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}").map(_.person)\n      }{ (xs: NonEmptyList[Int]) =>\n        val lst = xs.toList\n        personTable.join(p => sql"${p.idCol} in (${int4.list(lst)})".apply(lst)).map(p => p.id -> p)\n      }\n    )\n  }\n}\n')),(0,o.kt)("admonition",{type:"info"},(0,o.kt)("p",{parentName:"admonition"},"The ",(0,o.kt)("inlineCode",{parentName:"p"},"contBoundary")," is only available when using the ",(0,o.kt)("inlineCode",{parentName:"p"},"relBuilder"),", since type inference does not work very well."),(0,o.kt)("p",{parentName:"admonition"},"Inference troubles with ",(0,o.kt)("inlineCode",{parentName:"p"},"runField")," can also be alleviated by using the ",(0,o.kt)("inlineCode",{parentName:"p"},"relBuilder"),".")))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/677daa8b.a97d5fbe.js b/assets/js/677daa8b.0ae8387b.js similarity index 98% rename from assets/js/677daa8b.a97d5fbe.js rename to assets/js/677daa8b.0ae8387b.js index 9bc2728a..10a4cac1 100644 --- a/assets/js/677daa8b.a97d5fbe.js +++ b/assets/js/677daa8b.0ae8387b.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[672],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>u});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function l(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):l(l({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},d={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(a),u=r,v=m["".concat(s,".").concat(u)]||m[u]||d[u]||i;return a?t.createElement(v,l(l({ref:n},c),{},{components:a})):t.createElement(v,l({ref:n},c))}));function u(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=m;var o={};for(var s in 
n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:r,l[1]=o;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var t=a(7462),r=(a(7294),a(3905));const i={title:"Monadic Resolver DSL"},l=void 0,o={unversionedId:"server/schema/arrow_dsl",id:"server/schema/arrow_dsl",title:"Monadic Resolver DSL",description:"Modelling complex evaluation with Resolvers can be tricky.",source:"@site/docs/server/schema/arrow_dsl.md",sourceDirName:"server/schema",slug:"/server/schema/arrow_dsl",permalink:"/gql/docs/server/schema/arrow_dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/arrow_dsl.md",tags:[],version:"current",frontMatter:{title:"Monadic Resolver DSL"},sidebar:"docs",previous:{title:"The DSL",permalink:"/gql/docs/server/schema/dsl"},next:{title:"Resolvers",permalink:"/gql/docs/server/schema/resolvers"}},s={},p=[{value:"Technical details",id:"technical-details",level:3},{value:"Builder extensions",id:"builder-extensions",level:3},{value:"Composition",id:"composition",level:3},{value:"Toplevel expressions",id:"toplevel-expressions",level:4},{value:"Lifting arguments",id:"lifting-arguments",level:2},{value:"Choice",id:"choice",level:2},{value:"Batching example",id:"batching-example",level:2},{value:"Arrowless final?",id:"arrowless-final",level:2}],c={toc:p};function d(e){let{components:n,...a}=e;return(0,r.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Modelling complex evaluation with ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"s can be tricky.\nIt often involves using ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," to pair up an arrow's result with it's input and proceeding with ",(0,r.kt)("inlineCode",{parentName:"p"},"map")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap"),"."),(0,r.kt)("p",null,"Gql introduces a in-language monadic arrow dsl that re-writes a monadic arrow expression into a series of ",(0,r.kt)("inlineCode",{parentName:"p"},"map"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," invocations."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"This feature is akin to the ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," ",(0,r.kt)("a",{parentName:"p",href:"https://en.wikibooks.org/wiki/Haskell/Arrow_tutorial#Arrow_proc_notation"},"notation in Haskell"),".")),(0,r.kt)("p",null,"Using the notation is straightforward, the same (covariant) combinators for ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," exist in the arrow dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.resolver._\nimport cats.implicits._\nimport cats.effect._\nimport gql.arrow._\n\n// Bind the effect type (IO) to aid with compiler errors and inference\nval d = dsl[IO]\nimport d._\nval r: Resolver[IO, Int, String] = \n proc[Int] { i: Var[Int] =>\n for {\n a <- i.evalMap(x => IO(x + 2))\n b <- a.evalMap(x => IO(x * 3))\n c <- (a, b).tupled.evalMap{ case (aa, bb) => IO(aa + bb) }\n } yield c.map(_.toString)\n }\n")),(0,r.kt)("details",null,(0,r.kt)("summary",null,"Most syntatic extensions don't make much sense unless the arrow type (Resolver) is bound which requires knowing the effect type. 
The full monadic arrows language is available as toplevel functions also."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.arrow.{Language => L}\nL.proc[Resolver[IO, *, *], Int, String] { i =>\n for {\n x <- L.declare[Resolver[IO, *, *], Int, Int](i)(Resolver.lift[IO, Int](z => z * 2))\n y <- L.declare[Resolver[IO, *, *], (Int, Int), String]((x, x).tupled)(Resolver.lift[IO, (Int, Int)]{ case (a, b) => (a + b).toString() })\n } yield y\n}\n// res0: Resolver[IO, Int, String] = gql.resolver.Resolver@1f4cd637\n"))),(0,r.kt)("p",null,"The underlying arrow is also available for composition via ",(0,r.kt)("inlineCode",{parentName:"p"},"apply"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"proc[Int] { i =>\n for {\n x <- i(_.evalMap(z => IO(z + 1)))\n out <- x.apply(_.map(_.toString))\n } yield out\n}\n")),(0,r.kt)("h3",{id:"technical-details"},"Technical details"),(0,r.kt)("p",null,"The dsl introduces two datatypes, ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl"),"."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"Var")," is a reference to a set of variables that occur in the arrow. ",(0,r.kt)("inlineCode",{parentName:"li"},"Var")," forms an ",(0,r.kt)("inlineCode",{parentName:"li"},"Applicative"),"."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"Decl")," is used to re-write the monadic (",(0,r.kt)("inlineCode",{parentName:"li"},"flatMap"),") structure into an arrow. ",(0,r.kt)("inlineCode",{parentName:"li"},"Decl")," forms a ",(0,r.kt)("inlineCode",{parentName:"li"},"Monad"),".")),(0,r.kt)("p",null,"The primary use of ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl")," is to bind variables.\nEvery transformation on a ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable introduces a new ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable which is stored in the ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl")," structure."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Since ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative")," that implies that ",(0,r.kt)("inlineCode",{parentName:"p"},"map")," is available for ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"map")," for ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," is not memoized since it does not lift ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," has an extension ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/typelevel/cats/blob/c8aabcacd6045b9aed5c8626c4bf5308dd3f4912/core/src/main/scala/cats/arrow/Profunctor.scala#L59"},(0,r.kt)("inlineCode",{parentName:"a"},"rmap"))," which introduces a new ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable that memoizes the result.\nThat is, the following equivalences holds:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"declare((v: Var[A]).map(f))(Resolver.id[F, A]) <-> \n (v: Var[A]).rmap(f) <->\n (v: Var[A]).apply(_.map(f))\n"))),(0,r.kt)("p",null,"Closures are illegal in the dsl, as they are refer to variables that are not guaranteed to be available, so prefer invoking ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," once per 
",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"println {\n scala.util.Try {\n proc[Int] { i =>\n for {\n x <- i.evalMap(x => IO(x + 2))\n o <- x.andThen(proc[Int]{ _ =>\n x.rmap(y => y + 2)\n })\n } yield o\n }\n }.toEither.leftMap(_.getMessage)\n}\n// Left(Variable closure error.\n// Variable declared at arrow_dsl.md:70.\n// Compilation initiated at arrow_dsl.md:68.\n// Variables that were not declared in this scope may not be referenced.\n// Example:\n// ```\n// proc[Int]{ i =>\n// for {\n// x <- i.apply(_.map(_ + 1))\n// y <- i.apply(_.andThen(proc[Int]{ _ =>\n// // referencing 'x' here is an error\n// x.apply(_.map(_ + 1))\n// }))\n// } yield y\n// }\n// ```)\n")),(0,r.kt)("h3",{id:"builder-extensions"},"Builder extensions"),(0,r.kt)("p",null,"The dsl includes an extension method to ",(0,r.kt)("inlineCode",{parentName:"p"},"FieldBuilder")," that eases construction of ",(0,r.kt)("inlineCode",{parentName:"p"},"Field"),"s.\nThe dsl also enhances any resolver with a ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," extension method."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.ast._\n\nval gqlDsl = gql.dsl.GqlDsl[IO]\nimport gqlDsl._\n\nbuilder[Unit]{ b =>\n b.tpe(\n "MyType",\n "field" -> b.proc{ i =>\n for {\n x <- i.evalMap(_ => IO(1 + 2))\n y <- x.rmap(_ + 3)\n } yield y\n },\n "otherField" -> b(_.proc{ i =>\n i.evalMap(_ => IO(1 + 2))\n })\n )\n}\n')),(0,r.kt)("h3",{id:"composition"},"Composition"),(0,r.kt)("p",null,"Sharing common sub-arrows is a desirable property.\nThis can is expressed naturally with the dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def mulDiv(i: Var[Int]): Decl[Var[Int]] = for {\n x <- i.rmap(_ * 2)\n y <- x.rmap(_ / 2)\n} yield y\n\nproc[Int](mulDiv(_) >>= mulDiv)\n// res4: Resolver[IO, Int, Int] = gql.resolver.Resolver@25ef4a6f\n\nproc[Int](mulDiv(_) >>= mulDiv >>= mulDiv)\n// res5: Resolver[IO, Int, Int] = gql.resolver.Resolver@53fdfaf6\n")),(0,r.kt)("h4",{id:"toplevel-expressions"},"Toplevel expressions"),(0,r.kt)("p",null,"It is recommended to always work in a scope with your effect type (",(0,r.kt)("inlineCode",{parentName:"p"},"F"),") bound, to ease inference and type signatures.\nThere is however support for toplevel proc resolver expressions."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def toplevelMulDiv[F[_]](i: Var[Int]): ResolverDecl[F, Var[Int]] = {\n val d = dsl[F]\n import d._\n for {\n x <- i.rmap(_ * 2)\n y <- x.rmap(_ / 2)\n } yield y\n}\n")),(0,r.kt)("p",null,"Passing the dsl as an implicit parameter is also an option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def toplevelMulDiv[F[_]](i: Var[Int])(implicit d: ResolverArrowDsl[F]): ResolverDecl[F, Var[Int]] = {\n import d._\n for {\n x <- i.rmap(_ * 2)\n y <- x.rmap(_ / 2)\n } yield y\n}\n")),(0,r.kt)("h2",{id:"lifting-arguments"},"Lifting arguments"),(0,r.kt)("p",null,"Request arguments is made easier by the arrow dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'proc[Int] { i =>\n for {\n x <- i.evalMap(x => IO(x + 2))\n y <- argument(arg[Int]("age"))\n z <- (x, y).tupled.evalMap { case (a, b) => IO(a + b) }\n } yield z\n}\n')),(0,r.kt)("h2",{id:"choice"},"Choice"),(0,r.kt)("p",null,"The dsl also covers ",(0,r.kt)("inlineCode",{parentName:"p"},"ArrowChoice"),"'s 
",(0,r.kt)("inlineCode",{parentName:"p"},"choice")," combinator."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'proc[Int] { i =>\n for {\n x <- i.rmap(v => if (v > 5) Left(v) else Right(v))\n y <- x.choice(\n l => l.rmap(_ * 2),\n r => for {\n a <- argument(arg[Int]("age"))\n out <- (a, r, i).tupled.rmap{ case (a, b, c) => a + b + c }\n } yield out\n )\n } yield y\n}\n')),(0,r.kt)("h2",{id:"batching-example"},"Batching example"),(0,r.kt)("p",null,"Some steps commonly occur when writing batched resolvers:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Pulling an id out of the parent datatype."),(0,r.kt)("li",{parentName:"ol"},"Passing the id to a batching resolver."),(0,r.kt)("li",{parentName:"ol"},"Pairing the batched output with the parent datatype.")),(0,r.kt)("p",null,"This pairing requires some clever use of ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap/lmap"),".\nThis behaviour is much easier to express monadically since we have access to closures."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def getAddresses(ids: Set[Int]): IO[Map[Int, String]] =\n IO(ids.toList.map(id => id -> s"Address $id").toMap)\n\ncase class DataType(id: Int, name: String)\nproc[DataType] { i =>\n for {\n id <- i.rmap(_.id)\n r = Resolver.inlineBatch[IO, Int, String](getAddresses).opt\n (addr: Var[Option[String]]) <- id.andThen(r)\n p = (i, addr).tupled\n out <- p.rmap{ case (dt, a) => s"${dt.name} @ ${a.getOrElse("")}" }\n } yield out\n}\n')),(0,r.kt)("h2",{id:"arrowless-final"},"Arrowless final?"),(0,r.kt)("p",null,"Expressions can be declared for any arrow, not just ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),".\nThe usefullness of this property is not significant, but an interesting property nonetheless."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import cats.free._\nimport cats.arrow._\ndef mulDiv[F2[_, _]](v: Var[Int]): Free[DeclAlg[F2, *], Var[Int]] = {\n val d = new Language[F2] {}\n import d._\n // We can ask for the arrow evidence that must occur when some proc compiles us\n askArrow.flatMap{ implicit arrow: Arrow[F2] =>\n for {\n x <- v.rmap(_ * 2)\n y <- x.rmap(_ / 2)\n } yield y\n }\n}\n\nproc[Int] { i =>\n for {\n x <- i.rmap(_ * 2)\n y <- mulDiv(x)\n } yield y\n}\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[672],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>u});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function l(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):l(l({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},d={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var 
a=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(a),u=r,v=m["".concat(s,".").concat(u)]||m[u]||d[u]||i;return a?t.createElement(v,l(l({ref:n},c),{},{components:a})):t.createElement(v,l({ref:n},c))}));function u(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:r,l[1]=o;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>d,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var t=a(7462),r=(a(7294),a(3905));const i={title:"Monadic Resolver DSL"},l=void 0,o={unversionedId:"server/schema/arrow_dsl",id:"server/schema/arrow_dsl",title:"Monadic Resolver DSL",description:"Modelling complex evaluation with Resolvers can be tricky.",source:"@site/docs/server/schema/arrow_dsl.md",sourceDirName:"server/schema",slug:"/server/schema/arrow_dsl",permalink:"/gql/docs/server/schema/arrow_dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/arrow_dsl.md",tags:[],version:"current",frontMatter:{title:"Monadic Resolver DSL"},sidebar:"docs",previous:{title:"The DSL",permalink:"/gql/docs/server/schema/dsl"},next:{title:"Resolvers",permalink:"/gql/docs/server/schema/resolvers"}},s={},p=[{value:"Technical details",id:"technical-details",level:3},{value:"Builder extensions",id:"builder-extensions",level:3},{value:"Composition",id:"composition",level:3},{value:"Toplevel expressions",id:"toplevel-expressions",level:4},{value:"Lifting arguments",id:"lifting-arguments",level:2},{value:"Choice",id:"choice",level:2},{value:"Batching example",id:"batching-example",level:2},{value:"Arrowless final?",id:"arrowless-final",level:2}],c={toc:p};function d(e){let{components:n,...a}=e;return(0,r.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"Modelling complex evaluation with ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"s can be tricky.\nIt often involves using ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," to pair up an arrow's result with its input and proceeding with ",(0,r.kt)("inlineCode",{parentName:"p"},"map")," or ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap"),"."),(0,r.kt)("p",null,"Gql introduces an in-language monadic arrow dsl that re-writes a monadic arrow expression into a series of ",(0,r.kt)("inlineCode",{parentName:"p"},"map"),", ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," invocations."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"This feature is akin to the ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," ",(0,r.kt)("a",{parentName:"p",href:"https://en.wikibooks.org/wiki/Haskell/Arrow_tutorial#Arrow_proc_notation"},"notation in Haskell"),".")),(0,r.kt)("p",null,"Using the notation is straightforward: the same (covariant) combinators for ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver")," exist in the arrow dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.resolver._\nimport cats.implicits._\nimport cats.effect._\nimport gql.arrow._\n\n// Bind the effect type (IO) to aid with compiler errors and inference\nval d = dsl[IO]\nimport d._\nval r: Resolver[IO, Int, String] = \n  proc[Int] { i: Var[Int] =>\n    for {\n      a <- i.evalMap(x => IO(x + 2))\n      b <- a.evalMap(x => IO(x * 3))\n      c <- (a, b).tupled.evalMap{ case (aa, bb) => IO(aa + bb) 
}\n    } yield c.map(_.toString)\n  }\n")),(0,r.kt)("details",null,(0,r.kt)("summary",null,"Most syntactic extensions don't make much sense unless the arrow type (Resolver) is bound, which requires knowing the effect type. The full monadic arrows language is available as toplevel functions also."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import gql.arrow.{Language => L}\nL.proc[Resolver[IO, *, *], Int, String] { i =>\n  for {\n    x <- L.declare[Resolver[IO, *, *], Int, Int](i)(Resolver.lift[IO, Int](z => z * 2))\n    y <- L.declare[Resolver[IO, *, *], (Int, Int), String]((x, x).tupled)(Resolver.lift[IO, (Int, Int)]{ case (a, b) => (a + b).toString() })\n  } yield y\n}\n// res0: Resolver[IO, Int, String] = gql.resolver.Resolver@65246a48\n"))),(0,r.kt)("p",null,"The underlying arrow is also available for composition via ",(0,r.kt)("inlineCode",{parentName:"p"},"apply"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"proc[Int] { i =>\n  for {\n    x <- i(_.evalMap(z => IO(z + 1)))\n    out <- x.apply(_.map(_.toString))\n  } yield out\n}\n")),(0,r.kt)("h3",{id:"technical-details"},"Technical details"),(0,r.kt)("p",null,"The dsl introduces two datatypes, ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl"),"."),(0,r.kt)("ul",null,(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"Var")," is a reference to a set of variables that occur in the arrow. ",(0,r.kt)("inlineCode",{parentName:"li"},"Var")," forms an ",(0,r.kt)("inlineCode",{parentName:"li"},"Applicative"),"."),(0,r.kt)("li",{parentName:"ul"},(0,r.kt)("inlineCode",{parentName:"li"},"Decl")," is used to re-write the monadic (",(0,r.kt)("inlineCode",{parentName:"li"},"flatMap"),") structure into an arrow. 
",(0,r.kt)("inlineCode",{parentName:"li"},"Decl")," forms a ",(0,r.kt)("inlineCode",{parentName:"li"},"Monad"),".")),(0,r.kt)("p",null,"The primary use of ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl")," is to bind variables.\nEvery transformation on a ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable introduces a new ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable which is stored in the ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl")," structure."),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Since ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative")," that implies that ",(0,r.kt)("inlineCode",{parentName:"p"},"map")," is available for ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"map")," for ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," is not memoized since it does not lift ",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," into ",(0,r.kt)("inlineCode",{parentName:"p"},"Decl"),".\n",(0,r.kt)("inlineCode",{parentName:"p"},"Var")," has an extension ",(0,r.kt)("a",{parentName:"p",href:"https://github.com/typelevel/cats/blob/c8aabcacd6045b9aed5c8626c4bf5308dd3f4912/core/src/main/scala/cats/arrow/Profunctor.scala#L59"},(0,r.kt)("inlineCode",{parentName:"a"},"rmap"))," which introduces a new ",(0,r.kt)("inlineCode",{parentName:"p"},"Var"),"iable that memoizes the result.\nThat is, the following equivalences holds:"),(0,r.kt)("pre",{parentName:"admonition"},(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"declare((v: Var[A]).map(f))(Resolver.id[F, A]) <-> \n (v: Var[A]).rmap(f) <->\n (v: Var[A]).apply(_.map(f))\n"))),(0,r.kt)("p",null,"Closures are illegal in the dsl, as they are refer to variables that are not guaranteed to be available, so prefer invoking ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," once per ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),"."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"println {\n scala.util.Try {\n proc[Int] { i =>\n for {\n x <- i.evalMap(x => IO(x + 2))\n o <- x.andThen(proc[Int]{ _ =>\n x.rmap(y => y + 2)\n })\n } yield o\n }\n }.toEither.leftMap(_.getMessage)\n}\n// Left(Variable closure error.\n// Variable declared at arrow_dsl.md:70.\n// Compilation initiated at arrow_dsl.md:68.\n// Variables that were not declared in this scope may not be referenced.\n// Example:\n// ```\n// proc[Int]{ i =>\n// for {\n// x <- i.apply(_.map(_ + 1))\n// y <- i.apply(_.andThen(proc[Int]{ _ =>\n// // referencing 'x' here is an error\n// x.apply(_.map(_ + 1))\n// }))\n// } yield y\n// }\n// ```)\n")),(0,r.kt)("h3",{id:"builder-extensions"},"Builder extensions"),(0,r.kt)("p",null,"The dsl includes an extension method to ",(0,r.kt)("inlineCode",{parentName:"p"},"FieldBuilder")," that eases construction of ",(0,r.kt)("inlineCode",{parentName:"p"},"Field"),"s.\nThe dsl also enhances any resolver with a ",(0,r.kt)("inlineCode",{parentName:"p"},"proc")," extension method."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.ast._\n\nval gqlDsl = gql.dsl.GqlDsl[IO]\nimport gqlDsl._\n\nbuilder[Unit]{ b =>\n b.tpe(\n "MyType",\n "field" -> b.proc{ i =>\n for {\n x <- i.evalMap(_ => IO(1 + 2))\n y <- x.rmap(_ + 3)\n } yield y\n },\n "otherField" -> b(_.proc{ i =>\n i.evalMap(_ => IO(1 + 2))\n })\n )\n}\n')),(0,r.kt)("h3",{id:"composition"},"Composition"),(0,r.kt)("p",null,"Sharing common sub-arrows is a desirable 
property.\nThis can be expressed naturally with the dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def mulDiv(i: Var[Int]): Decl[Var[Int]] = for {\n  x <- i.rmap(_ * 2)\n  y <- x.rmap(_ / 2)\n} yield y\n\nproc[Int](mulDiv(_) >>= mulDiv)\n// res4: Resolver[IO, Int, Int] = gql.resolver.Resolver@2ef4e668\n\nproc[Int](mulDiv(_) >>= mulDiv >>= mulDiv)\n// res5: Resolver[IO, Int, Int] = gql.resolver.Resolver@32711b16\n")),(0,r.kt)("h4",{id:"toplevel-expressions"},"Toplevel expressions"),(0,r.kt)("p",null,"It is recommended to always work in a scope with your effect type (",(0,r.kt)("inlineCode",{parentName:"p"},"F"),") bound, to ease inference and type signatures.\nThere is, however, support for toplevel proc resolver expressions."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def toplevelMulDiv[F[_]](i: Var[Int]): ResolverDecl[F, Var[Int]] = {\n  val d = dsl[F]\n  import d._\n  for {\n    x <- i.rmap(_ * 2)\n    y <- x.rmap(_ / 2)\n  } yield y\n}\n")),(0,r.kt)("p",null,"Passing the dsl as an implicit parameter is also an option."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"def toplevelMulDiv[F[_]](i: Var[Int])(implicit d: ResolverArrowDsl[F]): ResolverDecl[F, Var[Int]] = {\n  import d._\n  for {\n    x <- i.rmap(_ * 2)\n    y <- x.rmap(_ / 2)\n  } yield y\n}\n")),(0,r.kt)("h2",{id:"lifting-arguments"},"Lifting arguments"),(0,r.kt)("p",null,"Requesting arguments is made easier by the arrow dsl."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'proc[Int] { i =>\n  for {\n    x <- i.evalMap(x => IO(x + 2))\n    y <- argument(arg[Int]("age"))\n    z <- (x, y).tupled.evalMap { case (a, b) => IO(a + b) }\n  } yield z\n}\n')),(0,r.kt)("h2",{id:"choice"},"Choice"),(0,r.kt)("p",null,"The dsl also covers ",(0,r.kt)("inlineCode",{parentName:"p"},"ArrowChoice"),"'s ",(0,r.kt)("inlineCode",{parentName:"p"},"choice")," combinator."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'proc[Int] { i =>\n  for {\n    x <- i.rmap(v => if (v > 5) Left(v) else Right(v))\n    y <- x.choice(\n      l => l.rmap(_ * 2),\n      r => for {\n        a <- argument(arg[Int]("age"))\n        out <- (a, r, i).tupled.rmap{ case (a, b, c) => a + b + c }\n      } yield out\n    )\n  } yield y\n}\n')),(0,r.kt)("h2",{id:"batching-example"},"Batching example"),(0,r.kt)("p",null,"Some steps commonly occur when writing batched resolvers:"),(0,r.kt)("ol",null,(0,r.kt)("li",{parentName:"ol"},"Pulling an id out of the parent datatype."),(0,r.kt)("li",{parentName:"ol"},"Passing the id to a batching resolver."),(0,r.kt)("li",{parentName:"ol"},"Pairing the batched output with the parent datatype.")),(0,r.kt)("p",null,"This pairing requires some clever use of ",(0,r.kt)("inlineCode",{parentName:"p"},"first")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"contramap/lmap"),".\nThis behaviour is much easier to express monadically since we have access to closures."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def getAddresses(ids: Set[Int]): IO[Map[Int, String]] =\n  IO(ids.toList.map(id => id -> s"Address $id").toMap)\n\ncase class DataType(id: Int, name: String)\nproc[DataType] { i =>\n  for {\n    id <- i.rmap(_.id)\n    r = Resolver.inlineBatch[IO, Int, String](getAddresses).opt\n    (addr: Var[Option[String]]) <- id.andThen(r)\n    p = (i, addr).tupled\n    out <- p.rmap{ case (dt, a) => s"${dt.name} @ ${a.getOrElse("")}" }\n  } yield out\n}\n')),(0,r.kt)("h2",{id:"arrowless-final"},"Arrowless 
final?"),(0,r.kt)("p",null,"Expressions can be declared for any arrow, not just ",(0,r.kt)("inlineCode",{parentName:"p"},"Resolver"),".\nThe usefulness of this property is not significant, but it is an interesting property nonetheless."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"import cats.free._\nimport cats.arrow._\ndef mulDiv[F2[_, _]](v: Var[Int]): Free[DeclAlg[F2, *], Var[Int]] = {\n  val d = new Language[F2] {}\n  import d._\n  // We can ask for the arrow evidence that must occur when some proc compiles us\n  askArrow.flatMap{ implicit arrow: Arrow[F2] =>\n    for {\n      x <- v.rmap(_ * 2)\n      y <- x.rmap(_ / 2)\n    } yield y\n  }\n}\n\nproc[Int] { i =>\n  for {\n    x <- i.rmap(_ * 2)\n    y <- mulDiv(x)\n  } yield y\n}\n")))}d.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/8588ea58.e8321fab.js b/assets/js/8588ea58.3b3182e4.js similarity index 96% rename from assets/js/8588ea58.e8321fab.js rename to assets/js/8588ea58.3b3182e4.js index 40356468..29e0d4e2 100644 --- a/assets/js/8588ea58.e8321fab.js +++ b/assets/js/8588ea58.3b3182e4.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[776],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>d});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function l(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):l(l({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(a),d=r,g=m["".concat(s,".").concat(d)]||m[d]||u[d]||i;return a?t.createElement(g,l(l({ref:n},c),{},{components:a})):t.createElement(g,l({ref:n},c))}));function d(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:r,l[1]=o;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var t=a(7462),r=(a(7294),a(3905));const i={title:"Query DSL"},l=void 0,o={unversionedId:"client/dsl",id:"client/dsl",title:"Query DSL",description:"gql provides a dsl for building graphql queries and response parsers.",source:"@site/docs/client/dsl.md",sourceDirName:"client",slug:"/client/dsl",permalink:"/gql/docs/client/dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/client/dsl.md",tags:[],version:"current",frontMatter:{title:"Query DSL"},sidebar:"docs",previous:{title:"Relational",permalink:"/gql/docs/server/integrations/relational"},next:{title:"Code 
generation",permalink:"/gql/docs/client/code-generation"}},s={},p=[{value:"Selections",id:"selections",level:2},{value:"Fragments",id:"fragments",level:2},{value:"Variables",id:"variables",level:2},{value:"Execution",id:"execution",level:2}],c={toc:p};function u(e){let{components:n,...a}=e;return(0,r.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"gql provides a dsl for building graphql queries and response parsers.\nWhen you compose your query with the dsl, you automatically compose both a query and a json decoder for the query response."),(0,r.kt)("h2",{id:"selections"},"Selections"),(0,r.kt)("p",null,"The simplest combinator is ",(0,r.kt)("inlineCode",{parentName:"p"},"sel")," which declares a field selection:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.client._\nimport gql.client.dsl._\nimport cats.implicits._\n\nsel[Option[String]]("name")\n// res0: SelectionSet[Option[String]] = SelectionSet(\n// impl = Fmap(\n// fa = Lift(\n// fa = Field(\n// fieldName = "name",\n// alias0 = None,\n// args0 = List(),\n// subQuery = OptionModifier(\n// subQuery = Terminal(decoder = io.circe.Decoder$$anon$26@70a69b28)\n// ),\n// directives0 = List()\n// )\n// ),\n// f = gql.client.SelectionSet$$$Lambda$12521/0x0000000803257040@62b9abd6\n// )\n// )\n')),(0,r.kt)("p",null,"Most combinators in the dsl have multiple overloads to provide various features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'sel.build[Option[String]]("name", _.alias("n"))\n\nsel.build[Option[String]]("name", _.args(arg("id", 42)))\n')),(0,r.kt)("p",null,"Every selection related structure forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative")," such that you can compose multiple selections together:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val s1 = sel[Option[String]]("name")\n\nval s2 = sel[Option[Int]]("age")\n\nval s3: SelectionSet[(Option[String], Option[Int])] = (s1, s2).tupled\n\nfinal case class PersonQuery(name: Option[String], age: Option[Int])\n\nval pq: SelectionSet[PersonQuery] = (s1, s2).mapN(PersonQuery.apply)\n')),(0,r.kt)("p",null,"Queries can also act as sub-selections (",(0,r.kt)("inlineCode",{parentName:"p"},"SubQuery")," in gql):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'sel[PersonQuery]("person") {\n pq\n}\n')),(0,r.kt)("p",null,"In the first examples the sub-query is captured implicitly.\nWe can also do this for custom types:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'implicit val pq2: SelectionSet[PersonQuery] = pq\n\nsel[PersonQuery]("person")\n')),(0,r.kt)("h2",{id:"fragments"},"Fragments"),(0,r.kt)("p",null,"Like in graphql we can define fragments to reuse selections:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val frag = fragment[String]("MyFragment", on="Person") {\n sel[String]("name")\n}\n\nval fragmentSpreads = sel[(Option[String], Option[Int])]("person") {\n (\n fragment.spread(frag),\n inlineFrag[Int]("Person") {\n sel[Int]("age")\n }\n ).tupled\n}\n')),(0,r.kt)("p",null,"Notice that both ",(0,r.kt)("inlineCode",{parentName:"p"},"fragment")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"inlineFrag")," return an optional result.\nThis is because the spread may not match on the type (if the spread condition is a sub-type of the spread-on type).\nThis is not always the desired behavior, and as such, 
fragments can be required:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"fragment.spread(frag).required: SelectionSet[String]\n")),(0,r.kt)("p",null,"You can provide additional information, should the fragment turn out to actually be missing:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'fragment.spread(frag).requiredFragment("MyFragment", on="Person")\n')),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Fragments should be preferred over re-using selections directly to reduce the rendered query size.")),(0,r.kt)("h2",{id:"variables"},"Variables"),(0,r.kt)("p",null,"Variables are accumulated into a sort of writer monad, such that they can be declared ad-hoc:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'variable[String]("name")\n// res7: Var[String, VariableName[String]] = Var(\n// impl = WriterT(\n// run = (\n// Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe = "String!",\n// default = None\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@19b1f55a\n// )\n// ),\n// variableNames = VariableName(name = "name")\n// )\n')),(0,r.kt)("p",null,"Variables can be combined with the ",(0,r.kt)("inlineCode",{parentName:"p"},"~")," operator:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'variable[String]("name") ~ variable[Int]("age")\n// res8: Var[(String, Int), (VariableName[String], VariableName[Int])] = Var(\n// impl = WriterT(\n// run = (\n// Append(\n// leftNE = Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe = "String!",\n// default = None\n// )\n// ),\n// rightNE = Singleton(\n// a = One(\n// name = VariableName(name = "age"),\n// tpe = "Int!",\n// default = None\n// )\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@488eb721\n// )\n// ),\n// variableNames = (VariableName(name = "name"), VariableName(name = "age"))\n// )\n')),(0,r.kt)("p",null,"Variables can also be declared as omittable, optionally with a default value:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'omittableVariable[String]("name", value("John")) ~\n omittableVariable[Int]("age")\n// res9: Var[(Option[String], Option[Int]), (VariableName[String], VariableName[Int])] = Var(\n// impl = WriterT(\n// run = (\n// Append(\n// leftNE = Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe = "String!",\n// default = Some(value = StringValue(v = "John", c = ()))\n// )\n// ),\n// rightNE = Singleton(\n// a = One(\n// name = VariableName(name = "age"),\n// tpe = "Int!",\n// default = None\n// )\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@2e03f5ad\n// )\n// ),\n// variableNames = (VariableName(name = "name"), VariableName(name = "age"))\n// )\n')),(0,r.kt)("p",null,'Variables can be "materialized" into a ',(0,r.kt)("inlineCode",{parentName:"p"},"VariableClosure")," by introducing them to a query:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'// Given a variable of type String, we can construct a query that returns an Int\nval queryWithVariable: VariableClosure[String, Int] = \n variable[String]("name").introduce{ name: VariableName[String] =>\n sel.build[Int]("id", _.args(arg("name", name)))\n }\n')),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"VariableClosure")," can be combined via ",(0,r.kt)("inlineCode",{parentName:"p"},"~")," and have their selections modified via 
",(0,r.kt)("inlineCode",{parentName:"p"},"modify"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def subQuery1: VariableClosure[String, Int] = queryWithVariable\n\ndef subQuery2: VariableClosure[String, Int] = \n variable[String]("name2").introduce{ name: VariableName[String] =>\n sel.build[Int]("id2", _.args(arg("name", name)))\n }\n\ndef combined: VariableClosure[(String, String), Int] = \n (subQuery1 ~ subQuery2).modify(_.map{ case (v1, v2) => v1 + v2 })\n\n// VariableClosure also forms a profunctor so we can also use rmap\n(subQuery1 ~ subQuery2).rmap{ case (v1, v2) => v1 + v2 }\n')),(0,r.kt)("h2",{id:"execution"},"Execution"),(0,r.kt)("p",null,"Once a query has been constructed, there are three ways to wrap it together.\n",(0,r.kt)("inlineCode",{parentName:"p"},"simple")," if the query is parameter-less and name-less, ",(0,r.kt)("inlineCode",{parentName:"p"},"named")," if your query is named and ",(0,r.kt)("inlineCode",{parentName:"p"},"parameterized")," if it is both named and parameterized:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.parser.QueryAst.OperationType\ndef simpleQuery = Query.simple(\n OperationType.Query,\n sel[Unit]("person") {\n (\n sel[Int]("id"),\n sel.build[Int]("age", _.args(arg("numbers", List(42))))\n ).tupled.void\n }\n)\n\nsimpleQuery.compile.query\n// res11: String = "query { person { age( numbers: [42] ), id } }"\n\nQuery.named(\n OperationType.Mutation,\n "MyMutation",\n sel[String]("name")\n).compile.query\n// res12: String = "mutation MyMutation { name }"\n\ndef paramQuery = Query.parameterized(\n OperationType.Subscription,\n "MySubscription",\n combined\n)\n\ndef compiledParamQuery = paramQuery.compile(("first", "second"))\ncompiledParamQuery.query\n// res13: String = """subscription MySubscription( $name : String!, $name2 : String! 
) {\n// id2( name: $name2 ),\n// id( name: $name )\n// }"""\n\ncompiledParamQuery.variables\n// res14: Option[io.circe.JsonObject] = Some(\n// value = object[name -> "first",name2 -> "second"]\n// )\n')))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[776],{3905:(e,n,a)=>{a.d(n,{Zo:()=>c,kt:()=>d});var t=a(7294);function r(e,n,a){return n in e?Object.defineProperty(e,n,{value:a,enumerable:!0,configurable:!0,writable:!0}):e[n]=a,e}function i(e,n){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),a.push.apply(a,t)}return a}function l(e){for(var n=1;n=0||(r[a]=e[a]);return r}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,a)&&(r[a]=e[a])}return r}var s=t.createContext({}),p=function(e){var n=t.useContext(s),a=n;return e&&(a="function"==typeof e?e(n):l(l({},n),e)),a},c=function(e){var n=p(e.components);return t.createElement(s.Provider,{value:n},e.children)},u={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var a=e.components,r=e.mdxType,i=e.originalType,s=e.parentName,c=o(e,["components","mdxType","originalType","parentName"]),m=p(a),d=r,g=m["".concat(s,".").concat(d)]||m[d]||u[d]||i;return a?t.createElement(g,l(l({ref:n},c),{},{components:a})):t.createElement(g,l({ref:n},c))}));function d(e,n){var a=arguments,r=n&&n.mdxType;if("string"==typeof e||r){var i=a.length,l=new Array(i);l[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:r,l[1]=o;for(var p=2;p{a.r(n),a.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>u,frontMatter:()=>i,metadata:()=>o,toc:()=>p});var t=a(7462),r=(a(7294),a(3905));const i={title:"Query DSL"},l=void 0,o={unversionedId:"client/dsl",id:"client/dsl",title:"Query DSL",description:"gql provides a dsl for building graphql queries and response parsers.",source:"@site/docs/client/dsl.md",sourceDirName:"client",slug:"/client/dsl",permalink:"/gql/docs/client/dsl",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/client/dsl.md",tags:[],version:"current",frontMatter:{title:"Query DSL"},sidebar:"docs",previous:{title:"Relational",permalink:"/gql/docs/server/integrations/relational"},next:{title:"Code generation",permalink:"/gql/docs/client/code-generation"}},s={},p=[{value:"Selections",id:"selections",level:2},{value:"Fragments",id:"fragments",level:2},{value:"Variables",id:"variables",level:2},{value:"Execution",id:"execution",level:2}],c={toc:p};function u(e){let{components:n,...a}=e;return(0,r.kt)("wrapper",(0,t.Z)({},c,a,{components:n,mdxType:"MDXLayout"}),(0,r.kt)("p",null,"gql provides a dsl for building graphql queries and response parsers.\nWhen you compose your query with the dsl, you automatically compose both a query and a json decoder for the query response."),(0,r.kt)("h2",{id:"selections"},"Selections"),(0,r.kt)("p",null,"The simplest combinator is ",(0,r.kt)("inlineCode",{parentName:"p"},"sel")," which declares a field selection:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.client._\nimport gql.client.dsl._\nimport cats.implicits._\n\nsel[Option[String]]("name")\n// res0: SelectionSet[Option[String]] = SelectionSet(\n// impl = Fmap(\n// fa = Lift(\n// fa = 
Field(\n// fieldName = "name",\n// alias0 = None,\n// args0 = List(),\n// subQuery = OptionModifier(\n// subQuery = Terminal(decoder = io.circe.Decoder$$anon$26@1566a132)\n// ),\n// directives0 = List()\n// )\n// ),\n// f = gql.client.SelectionSet$$$Lambda$12464/0x00000008033f6040@32bbd17f\n// )\n// )\n')),(0,r.kt)("p",null,"Most combinators in the dsl have multiple overloads to provide various features."),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'sel.build[Option[String]]("name", _.alias("n"))\n\nsel.build[Option[String]]("name", _.args(arg("id", 42)))\n')),(0,r.kt)("p",null,"Every selection related structure forms an ",(0,r.kt)("inlineCode",{parentName:"p"},"Applicative")," such that you can compose multiple selections together:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val s1 = sel[Option[String]]("name")\n\nval s2 = sel[Option[Int]]("age")\n\nval s3: SelectionSet[(Option[String], Option[Int])] = (s1, s2).tupled\n\nfinal case class PersonQuery(name: Option[String], age: Option[Int])\n\nval pq: SelectionSet[PersonQuery] = (s1, s2).mapN(PersonQuery.apply)\n')),(0,r.kt)("p",null,"Queries can also act as sub-selections (",(0,r.kt)("inlineCode",{parentName:"p"},"SubQuery")," in gql):"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'sel[PersonQuery]("person") {\n pq\n}\n')),(0,r.kt)("p",null,"In the first examples the sub-query is captured implicitly.\nWe can also do this for custom types:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'implicit val pq2: SelectionSet[PersonQuery] = pq\n\nsel[PersonQuery]("person")\n')),(0,r.kt)("h2",{id:"fragments"},"Fragments"),(0,r.kt)("p",null,"Like in graphql we can define fragments to reuse selections:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'val frag = fragment[String]("MyFragment", on="Person") {\n sel[String]("name")\n}\n\nval fragmentSpreads = sel[(Option[String], Option[Int])]("person") {\n (\n fragment.spread(frag),\n inlineFrag[Int]("Person") {\n sel[Int]("age")\n }\n ).tupled\n}\n')),(0,r.kt)("p",null,"Notice that both ",(0,r.kt)("inlineCode",{parentName:"p"},"fragment")," and ",(0,r.kt)("inlineCode",{parentName:"p"},"inlineFrag")," return an optional result.\nThis is because the spread may not match on the type (if the spread condition is a sub-type of the spread-on type).\nThis is not always the desired behavior, and as such, fragments can be required:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},"fragment.spread(frag).required: SelectionSet[String]\n")),(0,r.kt)("p",null,"You can provide additional information, should the fragment turn out to actually be missing:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'fragment.spread(frag).requiredFragment("MyFragment", on="Person")\n')),(0,r.kt)("admonition",{type:"info"},(0,r.kt)("p",{parentName:"admonition"},"Fragments should be preferred over re-using selections directly to reduce the rendered query size.")),(0,r.kt)("h2",{id:"variables"},"Variables"),(0,r.kt)("p",null,"Variables are accumulated into a sort of writer monad, such that they can be declared ad-hoc:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'variable[String]("name")\n// res7: Var[String, VariableName[String]] = Var(\n// impl = WriterT(\n// run = (\n// Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe 
= "String!",\n// default = None\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@6e174085\n// )\n// ),\n// variableNames = VariableName(name = "name")\n// )\n')),(0,r.kt)("p",null,"Variables can be combined with the ",(0,r.kt)("inlineCode",{parentName:"p"},"~")," operator:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'variable[String]("name") ~ variable[Int]("age")\n// res8: Var[(String, Int), (VariableName[String], VariableName[Int])] = Var(\n// impl = WriterT(\n// run = (\n// Append(\n// leftNE = Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe = "String!",\n// default = None\n// )\n// ),\n// rightNE = Singleton(\n// a = One(\n// name = VariableName(name = "age"),\n// tpe = "Int!",\n// default = None\n// )\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@7873a582\n// )\n// ),\n// variableNames = (VariableName(name = "name"), VariableName(name = "age"))\n// )\n')),(0,r.kt)("p",null,"Variables can also be declared as omittable, optionally with a default value:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'omittableVariable[String]("name", value("John")) ~\n omittableVariable[Int]("age")\n// res9: Var[(Option[String], Option[Int]), (VariableName[String], VariableName[Int])] = Var(\n// impl = WriterT(\n// run = (\n// Append(\n// leftNE = Singleton(\n// a = One(\n// name = VariableName(name = "name"),\n// tpe = "String!",\n// default = Some(value = StringValue(v = "John", c = ()))\n// )\n// ),\n// rightNE = Singleton(\n// a = One(\n// name = VariableName(name = "age"),\n// tpe = "Int!",\n// default = None\n// )\n// )\n// ),\n// io.circe.Encoder$AsObject$$anon$68@741e4d19\n// )\n// ),\n// variableNames = (VariableName(name = "name"), VariableName(name = "age"))\n// )\n')),(0,r.kt)("p",null,'Variables can be "materialized" into a ',(0,r.kt)("inlineCode",{parentName:"p"},"VariableClosure")," by introducing them to a query:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'// Given a variable of type String, we can construct a query that returns an Int\nval queryWithVariable: VariableClosure[String, Int] = \n variable[String]("name").introduce{ name: VariableName[String] =>\n sel.build[Int]("id", _.args(arg("name", name)))\n }\n')),(0,r.kt)("p",null,(0,r.kt)("inlineCode",{parentName:"p"},"VariableClosure")," can be combined via ",(0,r.kt)("inlineCode",{parentName:"p"},"~")," and have their selections modified via ",(0,r.kt)("inlineCode",{parentName:"p"},"modify"),":"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'def subQuery1: VariableClosure[String, Int] = queryWithVariable\n\ndef subQuery2: VariableClosure[String, Int] = \n variable[String]("name2").introduce{ name: VariableName[String] =>\n sel.build[Int]("id2", _.args(arg("name", name)))\n }\n\ndef combined: VariableClosure[(String, String), Int] = \n (subQuery1 ~ subQuery2).modify(_.map{ case (v1, v2) => v1 + v2 })\n\n// VariableClosure also forms a profunctor so we can also use rmap\n(subQuery1 ~ subQuery2).rmap{ case (v1, v2) => v1 + v2 }\n')),(0,r.kt)("h2",{id:"execution"},"Execution"),(0,r.kt)("p",null,"Once a query has been constructed, there are three ways to wrap it together.\n",(0,r.kt)("inlineCode",{parentName:"p"},"simple")," if the query is parameter-less and name-less, ",(0,r.kt)("inlineCode",{parentName:"p"},"named")," if your query is named and ",(0,r.kt)("inlineCode",{parentName:"p"},"parameterized")," if it is both named and 
parameterized:"),(0,r.kt)("pre",null,(0,r.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.parser.QueryAst.OperationType\ndef simpleQuery = Query.simple(\n OperationType.Query,\n sel[Unit]("person") {\n (\n sel[Int]("id"),\n sel.build[Int]("age", _.args(arg("numbers", List(42))))\n ).tupled.void\n }\n)\n\nsimpleQuery.compile.query\n// res11: String = "query { person { age( numbers: [42] ), id } }"\n\nQuery.named(\n OperationType.Mutation,\n "MyMutation",\n sel[String]("name")\n).compile.query\n// res12: String = "mutation MyMutation { name }"\n\ndef paramQuery = Query.parameterized(\n OperationType.Subscription,\n "MySubscription",\n combined\n)\n\ndef compiledParamQuery = paramQuery.compile(("first", "second"))\ncompiledParamQuery.query\n// res13: String = """subscription MySubscription( $name : String!, $name2 : String! ) {\n// id2( name: $name2 ),\n// id( name: $name )\n// }"""\n\ncompiledParamQuery.variables\n// res14: Option[io.circe.JsonObject] = Some(\n// value = object[name -> "first",name2 -> "second"]\n// )\n')))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/92cae478.9c63c4e7.js b/assets/js/92cae478.5c0b8d4e.js similarity index 99% rename from assets/js/92cae478.9c63c4e7.js rename to assets/js/92cae478.5c0b8d4e.js index 619f9d68..d2735521 100644 --- a/assets/js/92cae478.9c63c4e7.js +++ b/assets/js/92cae478.5c0b8d4e.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[633],{3905:(e,n,r)=>{r.d(n,{Zo:()=>u,kt:()=>d});var t=r(7294);function a(e,n,r){return n in e?Object.defineProperty(e,n,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[n]=r,e}function i(e,n){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),r.push.apply(r,t)}return r}function l(e){for(var n=1;n=0||(a[r]=e[r]);return a}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=t.createContext({}),c=function(e){var n=t.useContext(s),r=n;return e&&(r="function"==typeof e?e(n):l(l({},n),e)),r},u=function(e){var n=c(e.components);return t.createElement(s.Provider,{value:n},e.children)},p={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var r=e.components,a=e.mdxType,i=e.originalType,s=e.parentName,u=o(e,["components","mdxType","originalType","parentName"]),m=c(r),d=a,f=m["".concat(s,".").concat(d)]||m[d]||p[d]||i;return r?t.createElement(f,l(l({ref:n},u),{},{components:r})):t.createElement(f,l({ref:n},u))}));function d(e,n){var r=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var i=r.length,l=new Array(i);l[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:a,l[1]=o;for(var c=2;c{r.r(n),r.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>p,frontMatter:()=>i,metadata:()=>o,toc:()=>c});var t=r(7462),a=(r(7294),r(3905));const i={title:"Error handling"},l=void 0,o={unversionedId:"server/schema/error_handling",id:"server/schema/error_handling",title:"Error handling",description:"There are different types of errors in 
gql.",source:"@site/docs/server/schema/error_handling.md",sourceDirName:"server/schema",slug:"/server/schema/error_handling",permalink:"/gql/docs/server/schema/error_handling",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/error_handling.md",tags:[],version:"current",frontMatter:{title:"Error handling"},sidebar:"docs",previous:{title:"Context",permalink:"/gql/docs/server/schema/context"},next:{title:"Compiler",permalink:"/gql/docs/server/schema/compiler"}},s={},c=[{value:"Execution",id:"execution",level:2},{value:"Examples",id:"examples",level:2},{value:"Exception trick",id:"exception-trick",level:3}],u={toc:c};function p(e){let{components:n,...i}=e;return(0,a.kt)("wrapper",(0,t.Z)({},u,i,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"There are different types of errors in gql."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Schema validation errors, which should be caught in development.\nThese are for instance caused by duplicate field names or invalid typenames."),(0,a.kt)("li",{parentName:"ul"},"Query preparation errors, which are errors caused by invalid queries."),(0,a.kt)("li",{parentName:"ul"},"Execuion errors. These are errors that occur during query evaluation, caused by resolvers that fail.")),(0,a.kt)("h2",{id:"execution"},"Execution"),(0,a.kt)("p",null,"Error handling in gql can be performed in two ways, it can be returned explicitly or raised in ",(0,a.kt)("inlineCode",{parentName:"p"},"F"),"."),(0,a.kt)("h2",{id:"examples"},"Examples"),(0,a.kt)("p",null,"Let's setup the scene:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.ast._\nimport gql.dsl.all._\nimport gql.dsl.all.value._\nimport gql._\nimport cats.implicits._\nimport cats.data._\nimport cats.effect._\nimport cats.effect.unsafe.implicits.global\nimport io.circe.syntax._\n \ndef multifailSchema = \n tpe[IO, Unit](\n "Query", \n "field" -> build.from(arged(arg[Int]("i", scalar(10))).evalMap{ \n case 0 => IO.pure(Ior.left("fail gracefully"))\n case 1 => IO.raiseError(new Exception("fail hard"))\n case i => IO.pure(Ior.right(i))\n }.rethrow)\n )\n\ndef go(query: String, tpe: Type[IO, Unit] = multifailSchema) = \n Schema.query(tpe).flatMap { sch =>\n Compiler[IO].compile(sch, query) match {\n case Left(err) => \n println(err)\n IO.pure(err.asJson)\n case Right(Application.Query(fa)) => \n fa.map{x => println(x.errors);x.asJson }\n }\n }.unsafeRunSync()\n \ngo("query { field }")\n// Chain()\n// res0: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : 10\n// }]\n// )\n')),(0,a.kt)("p",null,"A query can fail gracefully by returning ",(0,a.kt)("inlineCode",{parentName:"p"},"Ior.left"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { field(i: 0) }")\n// Chain(Error(Right(fail gracefully),Chain("field")))\n// res1: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : null\n// },errors -> [\n// {\n// "message" : "fail gracefully",\n// "path" : [\n// "field"\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"A query can fail hard by raising an exception:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { field(i: 1) }")\n// Chain(Error(Left(java.lang.Exception: fail hard),Chain("field")))\n// res2: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : null\n// },errors -> [\n// {\n// "message" : "internal error",\n// "path" : [\n// "field"\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"A query 
can also fail before even evaluating the query:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { nonExisting }")\n// Preparation(Chain(PositionalError(Cursor(Chain()),List(Caret(0,8,8)),Field \'nonExisting\' is not a member of `Query`.)))\n// res3: io.circe.Json = JObject(\n// value = object[errors -> [\n// {\n// "message" : "Field \'nonExisting\' is not a member of `Query`.",\n// "locations" : [\n// {\n// "line" : 0,\n// "column" : 8\n// }\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"And finally, it can fail if it isn't parsable:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'def largerQuery = """\n query {\n field1\n field2(test: 42)\n }\n \n fragment test on Test {\n -value1\n value2 \n }\n"""\n\ngo(largerQuery)\n// Parse(ParseError(Caret(8,4,80),cats.Always@20fed7cb))\n// res4: io.circe.Json = JObject(\n// value = object[errors -> [\n// {\n// "message" : "could not parse query",\n// "locations" : [\n// {\n// "line" : 8,\n// "column" : 4\n// }\n// ],\n// "error" : "\\u001b[34mfailed at offset 80 on line 7 with code 45\\none of \\"...\\"\\nin char in range A to Z (code 65 to 90)\\nin char in range _ to _ (code 95 to 95)\\nin char in range a to z (code 97 to 122)\\nfor document:\\n\\u001b[0m\\u001b[32m| \\u001b[0m\\u001b[32m\\n| query {\\n| field1\\n| field2(test: 42)\\n| }\\n| \\n| fragment test on Test {\\n| \\u001b[41m\\u001b[30m-\\u001b[0m\\u001b[32mvalue1\\n| \\u001b[31m>^^^^^^^ line:7, column:4, offset:80, character code code:45\\u001b[0m\\u001b[32m\\n| value2 \\n| }\\n| \\u001b[0m\\u001b[0m"\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"Parser errors also look nice in ANSI terminals:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"Terminal output",src:r(4543).Z,width:"350",height:"329"})),(0,a.kt)("h3",{id:"exception-trick"},"Exception trick"),(0,a.kt)("p",null,"If for whatever reason you wish to pass information through exceptions, that is also possible:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'final case class MyException(msg: String, data: Int) extends Exception(msg)\n\nval res = \n Schema.query(\n tpe[IO, Unit](\n "Query",\n "field" -> eff(_ => IO.raiseError[String](MyException("fail hard", 42)))\n )\n ).flatMap { sch =>\n Compiler[IO].compile(sch, "query { field } ") match {\n case Right(Application.Query(run)) => run\n }\n }.unsafeRunSync()\n// res: QueryResult = QueryResult(\n// data = object[field -> null],\n// errors = Singleton(\n// a = Error(\n// error = Left(value = MyException(msg = "fail hard", data = 42)),\n// path = Singleton(a = JString(value = "field"))\n// )\n// )\n// )\n \nres.errors.headOption.flatMap(_.error.left.toOption) match {\n case Some(MyException(_, data)) => println(s"Got data: $data")\n case _ => println("No data")\n}\n// Got data: 42\n')))}p.isMDXComponent=!0},4543:(e,n,r)=>{r.d(n,{Z:()=>t});const t=r.p+"assets/images/error_image-7805f49e8b21d536040a6e281835df41.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[633],{3905:(e,n,r)=>{r.d(n,{Zo:()=>u,kt:()=>d});var t=r(7294);function a(e,n,r){return n in e?Object.defineProperty(e,n,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[n]=r,e}function i(e,n){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var t=Object.getOwnPropertySymbols(e);n&&(t=t.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),r.push.apply(r,t)}return r}function l(e){for(var n=1;n=0||(a[r]=e[r]);return 
a}(e,n);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(t=0;t=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(a[r]=e[r])}return a}var s=t.createContext({}),c=function(e){var n=t.useContext(s),r=n;return e&&(r="function"==typeof e?e(n):l(l({},n),e)),r},u=function(e){var n=c(e.components);return t.createElement(s.Provider,{value:n},e.children)},p={inlineCode:"code",wrapper:function(e){var n=e.children;return t.createElement(t.Fragment,{},n)}},m=t.forwardRef((function(e,n){var r=e.components,a=e.mdxType,i=e.originalType,s=e.parentName,u=o(e,["components","mdxType","originalType","parentName"]),m=c(r),d=a,f=m["".concat(s,".").concat(d)]||m[d]||p[d]||i;return r?t.createElement(f,l(l({ref:n},u),{},{components:r})):t.createElement(f,l({ref:n},u))}));function d(e,n){var r=arguments,a=n&&n.mdxType;if("string"==typeof e||a){var i=r.length,l=new Array(i);l[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:a,l[1]=o;for(var c=2;c{r.r(n),r.d(n,{assets:()=>s,contentTitle:()=>l,default:()=>p,frontMatter:()=>i,metadata:()=>o,toc:()=>c});var t=r(7462),a=(r(7294),r(3905));const i={title:"Error handling"},l=void 0,o={unversionedId:"server/schema/error_handling",id:"server/schema/error_handling",title:"Error handling",description:"There are different types of errors in gql.",source:"@site/docs/server/schema/error_handling.md",sourceDirName:"server/schema",slug:"/server/schema/error_handling",permalink:"/gql/docs/server/schema/error_handling",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/schema/error_handling.md",tags:[],version:"current",frontMatter:{title:"Error handling"},sidebar:"docs",previous:{title:"Context",permalink:"/gql/docs/server/schema/context"},next:{title:"Compiler",permalink:"/gql/docs/server/schema/compiler"}},s={},c=[{value:"Execution",id:"execution",level:2},{value:"Examples",id:"examples",level:2},{value:"Exception trick",id:"exception-trick",level:3}],u={toc:c};function p(e){let{components:n,...i}=e;return(0,a.kt)("wrapper",(0,t.Z)({},u,i,{components:n,mdxType:"MDXLayout"}),(0,a.kt)("p",null,"There are different types of errors in gql."),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},"Schema validation errors, which should be caught in development.\nThese are for instance caused by duplicate field names or invalid typenames."),(0,a.kt)("li",{parentName:"ul"},"Query preparation errors, which are errors caused by invalid queries."),(0,a.kt)("li",{parentName:"ul"},"Execution errors. 
These are errors that occur during query evaluation, caused by resolvers that fail.")),(0,a.kt)("h2",{id:"execution"},"Execution"),(0,a.kt)("p",null,"Error handling in gql can be performed in two ways: it can be returned explicitly or raised in ",(0,a.kt)("inlineCode",{parentName:"p"},"F"),"."),(0,a.kt)("h2",{id:"examples"},"Examples"),(0,a.kt)("p",null,"Let's set up the scene:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.ast._\nimport gql.dsl.all._\nimport gql.dsl.all.value._\nimport gql._\nimport cats.implicits._\nimport cats.data._\nimport cats.effect._\nimport cats.effect.unsafe.implicits.global\nimport io.circe.syntax._\n \ndef multifailSchema = \n tpe[IO, Unit](\n "Query", \n "field" -> build.from(arged(arg[Int]("i", scalar(10))).evalMap{ \n case 0 => IO.pure(Ior.left("fail gracefully"))\n case 1 => IO.raiseError(new Exception("fail hard"))\n case i => IO.pure(Ior.right(i))\n }.rethrow)\n )\n\ndef go(query: String, tpe: Type[IO, Unit] = multifailSchema) = \n Schema.query(tpe).flatMap { sch =>\n Compiler[IO].compile(sch, query) match {\n case Left(err) => \n println(err)\n IO.pure(err.asJson)\n case Right(Application.Query(fa)) => \n fa.map{x => println(x.errors);x.asJson }\n }\n }.unsafeRunSync()\n \ngo("query { field }")\n// Chain()\n// res0: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : 10\n// }]\n// )\n')),(0,a.kt)("p",null,"A query can fail gracefully by returning ",(0,a.kt)("inlineCode",{parentName:"p"},"Ior.left"),":"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { field(i: 0) }")\n// Chain(Error(Right(fail gracefully),Chain("field")))\n// res1: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : null\n// },errors -> [\n// {\n// "message" : "fail gracefully",\n// "path" : [\n// "field"\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"A query can fail hard by raising an exception:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { field(i: 1) }")\n// Chain(Error(Left(java.lang.Exception: fail hard),Chain("field")))\n// res2: io.circe.Json = JObject(\n// value = object[data -> {\n// "field" : null\n// },errors -> [\n// {\n// "message" : "internal error",\n// "path" : [\n// "field"\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"A query can also fail before even evaluating the query:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'go("query { nonExisting }")\n// Preparation(Chain(PositionalError(Cursor(Chain()),List(Caret(0,8,8)),Field \'nonExisting\' is not a member of `Query`.)))\n// res3: io.circe.Json = JObject(\n// value = object[errors -> [\n// {\n// "message" : "Field \'nonExisting\' is not a member of `Query`.",\n// "locations" : [\n// {\n// "line" : 0,\n// "column" : 8\n// }\n// ]\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"And finally, it can fail if it isn't parsable:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'def largerQuery = """\n query {\n field1\n field2(test: 42)\n }\n \n fragment test on Test {\n -value1\n value2 \n }\n"""\n\ngo(largerQuery)\n// Parse(ParseError(Caret(8,4,80),cats.Always@4f58e46b))\n// res4: io.circe.Json = JObject(\n// value = object[errors -> [\n// {\n// "message" : "could not parse query",\n// "locations" : [\n// {\n// "line" : 8,\n// "column" : 4\n// }\n// ],\n// "error" : "\\u001b[34mfailed at offset 80 on line 7 with code 45\\none of \\"...\\"\\nin char in range A to Z (code 65 to 
90)\\nin char in range _ to _ (code 95 to 95)\\nin char in range a to z (code 97 to 122)\\nfor document:\\n\\u001b[0m\\u001b[32m| \\u001b[0m\\u001b[32m\\n| query {\\n| field1\\n| field2(test: 42)\\n| }\\n| \\n| fragment test on Test {\\n| \\u001b[41m\\u001b[30m-\\u001b[0m\\u001b[32mvalue1\\n| \\u001b[31m>^^^^^^^ line:7, column:4, offset:80, character code code:45\\u001b[0m\\u001b[32m\\n| value2 \\n| }\\n| \\u001b[0m\\u001b[0m"\n// }\n// ]]\n// )\n')),(0,a.kt)("p",null,"Parser errors also look nice in ANSI terminals:"),(0,a.kt)("p",null,(0,a.kt)("img",{alt:"Terminal output",src:r(4543).Z,width:"350",height:"329"})),(0,a.kt)("h3",{id:"exception-trick"},"Exception trick"),(0,a.kt)("p",null,"If for whatever reason you wish to pass information through exceptions, that is also possible:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-scala"},'final case class MyException(msg: String, data: Int) extends Exception(msg)\n\nval res = \n Schema.query(\n tpe[IO, Unit](\n "Query",\n "field" -> eff(_ => IO.raiseError[String](MyException("fail hard", 42)))\n )\n ).flatMap { sch =>\n Compiler[IO].compile(sch, "query { field } ") match {\n case Right(Application.Query(run)) => run\n }\n }.unsafeRunSync()\n// res: QueryResult = QueryResult(\n// data = object[field -> null],\n// errors = Singleton(\n// a = Error(\n// error = Left(value = MyException(msg = "fail hard", data = 42)),\n// path = Singleton(a = JString(value = "field"))\n// )\n// )\n// )\n \nres.errors.headOption.flatMap(_.error.left.toOption) match {\n case Some(MyException(_, data)) => println(s"Got data: $data")\n case _ => println("No data")\n}\n// Got data: 42\n')))}p.isMDXComponent=!0},4543:(e,n,r)=>{r.d(n,{Z:()=>t});const t=r.p+"assets/images/error_image-7805f49e8b21d536040a6e281835df41.png"}}]); \ No newline at end of file diff --git a/assets/js/ceb10064.fb80a129.js b/assets/js/ceb10064.65987bd1.js similarity index 95% rename from assets/js/ceb10064.fb80a129.js rename to assets/js/ceb10064.65987bd1.js index 0ab38c34..46491717 100644 --- a/assets/js/ceb10064.fb80a129.js +++ b/assets/js/ceb10064.65987bd1.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[143],{3905:(e,a,t)=>{t.d(a,{Zo:()=>o,kt:()=>d});var n=t(7294);function s(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function l(e){for(var a=1;a=0||(s[t]=e[t]);return s}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(s[t]=e[t])}return s}var p=n.createContext({}),i=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):l(l({},a),e)),t},o=function(e){var a=i(e.components);return n.createElement(p.Provider,{value:a},e.children)},c={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},h=n.forwardRef((function(e,a){var t=e.components,s=e.mdxType,r=e.originalType,p=e.parentName,o=m(e,["components","mdxType","originalType","parentName"]),h=i(t),d=s,u=h["".concat(p,".").concat(d)]||h[d]||c[d]||r;return t?n.createElement(u,l(l({ref:a},o),{},{components:t})):n.createElement(u,l({ref:a},o))}));function d(e,a){var t=arguments,s=a&&a.mdxType;if("string"==typeof e||s){var r=t.length,l=new 
Array(r);l[0]=h;var m={};for(var p in a)hasOwnProperty.call(a,p)&&(m[p]=a[p]);m.originalType=e,m.mdxType="string"==typeof e?e:s,l[1]=m;for(var i=2;i{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>l,default:()=>c,frontMatter:()=>r,metadata:()=>m,toc:()=>i});var n=t(7462),s=(t(7294),t(3905));const r={title:"Planning"},l=void 0,m={unversionedId:"server/execution/planning",id:"server/execution/planning",title:"Planning",description:"Planner algorithm",source:"@site/docs/server/execution/planning.md",sourceDirName:"server/execution",slug:"/server/execution/planning",permalink:"/gql/docs/server/execution/planning",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/execution/planning.md",tags:[],version:"current",frontMatter:{title:"Planning"},sidebar:"docs",previous:{title:"Structuring large applications",permalink:"/gql/docs/server/schema/structuring_apps"},next:{title:"Statistics",permalink:"/gql/docs/server/execution/statistics"}},p={},i=[{value:"Planner algorithm",id:"planner-algorithm",level:2},{value:"The high-level idea",id:"the-high-level-idea",level:3},{value:"Default planner intuition",id:"default-planner-intuition",level:3},{value:"Converting a query to a problem",id:"converting-a-query-to-a-problem",level:3}],o={toc:i};function c(e){let{components:a,...r}=e;return(0,s.kt)("wrapper",(0,n.Z)({},o,r,{components:a,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"planner-algorithm"},"Planner algorithm"),(0,s.kt)("h3",{id:"the-high-level-idea"},"The high-level idea"),(0,s.kt)("p",null,"When planning for a query the planner assigns weights to every edge/field, optionally labels them with their batch names (if a batch resolver was used) and finally converts the problem to a simpler DAG (directed acyclic graph) form."),(0,s.kt)("admonition",{type:"tip"},(0,s.kt)("p",{parentName:"admonition"},"For information on how the planner assigns weights, check out the ",(0,s.kt)("a",{parentName:"p",href:"/gql/docs/server/execution/statistics"},"statistics"),".")),(0,s.kt)("p",null,"The goal now is to form batches by contracting nodes that are batchable (jobs of the same family in scheduling/OR jargon)."),(0,s.kt)("p",null,"For instance, assume the following DAG is in question:"),(0,s.kt)("mermaid",{value:"flowchart LR\n Query((Query)) ---\x3e a(a
batch: z
cost: 2)\n a --\x3e A((A))\n\n Query --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e c(c
batch: z
cost: 2)\n c --\x3e C((C))"}),(0,s.kt)("p",null,"Now consider the following plan, where a possible contraction is colored red:"),(0,s.kt)("mermaid",{value:"flowchart LR\n Query((Query)) -----\x3e a(a
batch: z
cost: 2)\n a --\x3e A((A))\n\n Query --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e c(c
batch: z
cost: 2)\n c --\x3e C((C))\n\nstyle a stroke:#f66,stroke-dasharray: 5 5\nstyle c stroke:#f66,stroke-dasharray: 5 5"}),(0,s.kt)("p",null,"And contracted it becomes:"),(0,s.kt)("mermaid",{value:'flowchart LR\n Query((Query)) --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e ac("{a,c}"
batch: z
 cost: 2)\n ac --\x3e A((A))\n ac --\x3e C((C))\n\nstyle ac stroke:#f66,stroke-dasharray: 5 5'}),(0,s.kt)("h3",{id:"default-planner-intuition"},"Default planner intuition"),(0,s.kt)("p",null,"The default planner heuristic in gql lazily enumerates all plans, imposing a locally greedy order on the enumerated plans.\nThe default planner also employs some simple but powerful pruning rules to eliminate trivially uninteresting plan variations."),(0,s.kt)("p",null,'The planner works through the problem from the root(s) and down through the DAG.\nThe algorithm keeps some state regarding what batches have been visited and what nodes are scheduled in the "current plan".\nIn a round of planning the algorithm will figure out what nodes are schedulable by looking at its state.'),(0,s.kt)("p",null,"The planner will lazily generate all combinations of possible batches of schedulable nodes."),(0,s.kt)("admonition",{type:"note"},(0,s.kt)("p",{parentName:"admonition"},"One can easily cause a combinatorial explosion by generating combinations.\nFortunately we don't consider every plan (and in fact, the default algorithm only pulls ",(0,s.kt)("span",{parentName:"p",className:"math math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mi",{parentName:"mrow"},"O"),(0,s.kt)("mo",{parentName:"mrow",stretchy:"false"},"("),(0,s.kt)("mi",{parentName:"mrow",mathvariant:"normal"},"\u2223"),(0,s.kt)("mi",{parentName:"mrow"},"V"),(0,s.kt)("mi",{parentName:"mrow",mathvariant:"normal"},"\u2223"),(0,s.kt)("mo",{parentName:"mrow",stretchy:"false"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"O(|V|)")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1em",verticalAlign:"-0.25em"}}),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.02778em"}},"O"),(0,s.kt)("span",{parentName:"span",className:"mopen"},"("),(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2223"),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.22222em"}},"V"),(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2223"),(0,s.kt)("span",{parentName:"span",className:"mclose"},")")))))," plans).\nFurthermore, most problems will have fewer than n plans.")),(0,s.kt)("p",null,'The planner will always generate the largest batches first, hence the "locally greedy" ordering.'),(0,s.kt)("p",null,"Trivially schedulable nodes are always scheduled first if possible; a pruning rule makes sure of this.\nFor a given schedulable node, if no other un-scheduled node exists of the same family (excluding its own descendants), then that node's only and optimal batch is the singleton batch containing only that node."),(0,s.kt)("p",null,"There are other pruning rules that have been considered, but don't seem necessary for practical problems since most problems produce very few plans."),(0,s.kt)("p",null,'One such pruning rule considers "optimal" generated batch combinations.\nIf the largest batch that the planner can generate ',(0,s.kt)("span",{parentName:"p",className:"math 
math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mfrac",{parentName:"mrow",linethickness:"0px"},(0,s.kt)("mi",{parentName:"mfrac"},"n"),(0,s.kt)("mi",{parentName:"mfrac"},"n")),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"n \\choose n")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.2em",verticalAlign:"-0.35em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},"(")),(0,s.kt)("span",{parentName:"span",className:"mfrac"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.7454em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-2.355em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n")))),(0,s.kt)("span",{parentName:"span",style:{top:"-3.144em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n"))))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.345em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},")"))))))),' contains nodes that all have the same "latest ending parent", then all other combinations ',(0,s.kt)("span",{parentName:"p",className:"math math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mrow",{parentName:"mrow"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mfrac",{parentName:"mrow",linethickness:"0px"},(0,s.kt)("mi",{parentName:"mfrac"},"n"),(0,s.kt)("mi",{parentName:"mfrac"},"k")),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("mtext",{parentName:"mrow"},"\xa0where\xa0"),(0,s.kt)("mi",{parentName:"mrow"},"k"),(0,s.kt)("mo",{parentName:"mrow"},"<"),(0,s.kt)("mi",{parentName:"mrow"},"n")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"{n \\choose k} \\text{ where } k < 
n")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.2em",verticalAlign:"-0.35em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},"(")),(0,s.kt)("span",{parentName:"span",className:"mfrac"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.7454em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-2.355em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight",style:{marginRight:"0.03148em"}},"k")))),(0,s.kt)("span",{parentName:"span",style:{top:"-3.144em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n"))))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.345em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},")")))),(0,s.kt)("span",{parentName:"span",className:"mord text"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\xa0where\xa0")),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.03148em"}},"k"),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}}),(0,s.kt)("span",{parentName:"span",className:"mrel"},"<"),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}})),(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"0.4306em"}}),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"n")))))," are trivially fruitless."),(0,s.kt)("p",null,"Once the planner has constructed a lazy list of batches, it then consideres every plan that ",(0,s.kt)("em",{parentName:"p"},"could")," exist for every batch, hence a computational difficulty of finding the ",(0,s.kt)("strong",{parentName:"p"},"best")," plan."),(0,s.kt)("admonition",{type:"info"},(0,s.kt)("p",{parentName:"admonition"},"If you want to understand the algorithm better, consider taking a look at the source code.")),(0,s.kt)("h3",{id:"converting-a-query-to-a-problem"},"Converting a query to a problem"),(0,s.kt)("p",null,"gql considers only resolvers when running query planning.\nEvery field that is traversed in a query is expanded to all the resolvers it consists such that it becomes a digraph."),(0,s.kt)("p",null,"As an example, consider the following instance:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},'import gql._\nimport gql.dsl.all._\nimport 
gql.ast._\nimport gql.server.planner._\nimport gql.resolver._\nimport scala.concurrent.duration._\nimport cats.implicits._\nimport cats.effect._\nimport cats.effect.unsafe.implicits.global\n\ncase object Child\n\ndef wait[I](ms: Int) = Resolver.effect[IO, I](_ => IO.sleep(50.millis))\n\nval schem = Schema.stateful{\n Resolver.batch[IO, Unit, Int](_ => IO.sleep(10.millis) as Map(() -> 42)).flatMap{ b1 =>\n Resolver.batch[IO, Unit, String](_ => IO.sleep(15.millis) as Map(() -> "42")).map{ b2 =>\n implicit lazy val child: Type[IO, Child.type] = builder[IO, Child.type]{ b =>\n b.tpe(\n "Child",\n "b1" -> b.from(wait(50) andThen b1.opt map (_.get)),\n "b2" -> b.from(wait(100) andThen b2.opt map (_.get)),\n )\n }\n\n SchemaShape.unit[IO](\n builder[IO, Unit]{ b =>\n b.fields(\n "child" -> b.from(wait(42) as Child),\n "b2" -> b.from(wait(25) andThen b2.opt map (_.get))\n )\n }\n )\n }\n }\n}.unsafeRunSync()\n')),(0,s.kt)("p",null,"Now let's define our query and modify our schema so the planner logs:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},'val qry = """\n query {\n child {\n b1\n b2\n }\n b2\n }\n"""\n\nval withLoggedPlanner = schem.copy(planner = new Planner[IO] {\n def plan(naive: NodeTree): IO[OptimizedDAG] =\n schem.planner.plan(naive).map { output =>\n println(output.show(ansiColors = false))\n println(s"naive: ${output.totalCost}")\n println(s"optimized: ${output.optimizedCost}")\n output\n }\n})\n')),(0,s.kt)("p",null,"And we plan for it and inspect the result:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"def runQry() = {\n Compiler[IO]\n .compile(withLoggedPlanner, qry)\n .traverse_{ case Application.Query(fa) => fa }\n .unsafeRunSync()\n}\n\nrunQry()\n// name: Query_child.compose-left.compose-right.compose-right, cost: 100.00, end: 100.00, batch: 5\n// name: Child_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 2\n// name: batch_1, cost: 100.00, end: 300.00, batch: 4\n// name: Child_b1.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 3\n// name: batch_0, cost: 100.00, end: 300.00, batch: 1\n// name: Query_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 100.00, batch: 0\n// name: batch_1, cost: 100.00, end: 200.00, batch: 4\n// >>>>>>>>>>>>>name: batch_1, cost: 100.00, end: 300.00, batch: 4\n// \n// naive: 700.0\n// optimized: 600.0\n")),(0,s.kt)("p",null,"We can warm up the weights (statistics) a bit by running the query a few times:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"(0 to 10).toList.foreach(_ => runQry())\n")),(0,s.kt)("p",null,"Now we can see how the weights are assigned:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"runQry()\n// name: Query_child.compose-left.compose-right.compose-right, cost: 50261.91, end: 50261.91, batch: 4\n// name: Child_b2.compose-left.compose-left.compose-right, cost: 50216.00, end: 100477.91, batch: 5\n// name: batch_1, cost: 15171.46, end: 115649.37, batch: 0\n// name: Child_b1.compose-left.compose-left.compose-right, cost: 50217.82, end: 100479.73, batch: 3\n// name: batch_0, cost: 10190.82, end: 110670.55, batch: 2\n// name: Query_b2.compose-left.compose-left.compose-right, cost: 50276.73, end: 50276.73, batch: 1\n// name: batch_1, cost: 15171.46, end: 65448.19, batch: 0\n// >>>>>>>>>>>>>>>>>name: batch_1, cost: 15171.46, end: 115649.37, batch: 0\n// \n// naive: 241506.1818181818\n// optimized: 
226334.72727272726\n")),(0,s.kt)("p",null,"Plans can also be shown nicely in a terminal with ANSI colors:\n",(0,s.kt)("img",{alt:"Terminal output",src:t(1745).Z,width:"1144",height:"333"})))}c.isMDXComponent=!0},1745:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/plan_image-bacfe186ade480842758a5d754111dfd.png"}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[143],{3905:(e,a,t)=>{t.d(a,{Zo:()=>o,kt:()=>d});var n=t(7294);function s(e,a,t){return a in e?Object.defineProperty(e,a,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[a]=t,e}function r(e,a){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);a&&(n=n.filter((function(a){return Object.getOwnPropertyDescriptor(e,a).enumerable}))),t.push.apply(t,n)}return t}function l(e){for(var a=1;a=0||(s[t]=e[t]);return s}(e,a);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(s[t]=e[t])}return s}var p=n.createContext({}),i=function(e){var a=n.useContext(p),t=a;return e&&(t="function"==typeof e?e(a):l(l({},a),e)),t},o=function(e){var a=i(e.components);return n.createElement(p.Provider,{value:a},e.children)},c={inlineCode:"code",wrapper:function(e){var a=e.children;return n.createElement(n.Fragment,{},a)}},h=n.forwardRef((function(e,a){var t=e.components,s=e.mdxType,r=e.originalType,p=e.parentName,o=m(e,["components","mdxType","originalType","parentName"]),h=i(t),d=s,u=h["".concat(p,".").concat(d)]||h[d]||c[d]||r;return t?n.createElement(u,l(l({ref:a},o),{},{components:t})):n.createElement(u,l({ref:a},o))}));function d(e,a){var t=arguments,s=a&&a.mdxType;if("string"==typeof e||s){var r=t.length,l=new Array(r);l[0]=h;var m={};for(var p in a)hasOwnProperty.call(a,p)&&(m[p]=a[p]);m.originalType=e,m.mdxType="string"==typeof e?e:s,l[1]=m;for(var i=2;i{t.r(a),t.d(a,{assets:()=>p,contentTitle:()=>l,default:()=>c,frontMatter:()=>r,metadata:()=>m,toc:()=>i});var n=t(7462),s=(t(7294),t(3905));const r={title:"Planning"},l=void 0,m={unversionedId:"server/execution/planning",id:"server/execution/planning",title:"Planning",description:"Planner algorithm",source:"@site/docs/server/execution/planning.md",sourceDirName:"server/execution",slug:"/server/execution/planning",permalink:"/gql/docs/server/execution/planning",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/server/execution/planning.md",tags:[],version:"current",frontMatter:{title:"Planning"},sidebar:"docs",previous:{title:"Structuring large applications",permalink:"/gql/docs/server/schema/structuring_apps"},next:{title:"Statistics",permalink:"/gql/docs/server/execution/statistics"}},p={},i=[{value:"Planner algorithm",id:"planner-algorithm",level:2},{value:"The high-level idea",id:"the-high-level-idea",level:3},{value:"Default planner intuition",id:"default-planner-intuition",level:3},{value:"Converting a query to a problem",id:"converting-a-query-to-a-problem",level:3}],o={toc:i};function c(e){let{components:a,...r}=e;return(0,s.kt)("wrapper",(0,n.Z)({},o,r,{components:a,mdxType:"MDXLayout"}),(0,s.kt)("h2",{id:"planner-algorithm"},"Planner algorithm"),(0,s.kt)("h3",{id:"the-high-level-idea"},"The high-level idea"),(0,s.kt)("p",null,"When planning for a query the planner assigns weights to every edge/field, optionally labels them with their batch names (if a batch resolver was used) and finally converts the problem to a simpler DAG (directed acyclic graph) 
form."),(0,s.kt)("admonition",{type:"tip"},(0,s.kt)("p",{parentName:"admonition"},"For information on how the planner assigns weights, check out the ",(0,s.kt)("a",{parentName:"p",href:"/gql/docs/server/execution/statistics"},"statistics"),".")),(0,s.kt)("p",null,"The goal now is to form batches by contracting nodes that are batchable (jobs of the same family in scheduling/OR jargon)."),(0,s.kt)("p",null,"For instance, assume the following DAG is in question:"),(0,s.kt)("mermaid",{value:"flowchart LR\n Query((Query)) ---\x3e a(a
batch: z
cost: 2)\n a --\x3e A((A))\n\n Query --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e c(c
batch: z
cost: 2)\n c --\x3e C((C))"}),(0,s.kt)("p",null,"Now consider the following plan, where a possible contraction is colored red:"),(0,s.kt)("mermaid",{value:"flowchart LR\n Query((Query)) -----\x3e a(a
batch: z
cost: 2)\n a --\x3e A((A))\n\n Query --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e c(c
batch: z
cost: 2)\n c --\x3e C((C))\n\nstyle a stroke:#f66,stroke-dasharray: 5 5\nstyle c stroke:#f66,stroke-dasharray: 5 5"}),(0,s.kt)("p",null,"And contracted it becomes:"),(0,s.kt)("mermaid",{value:'flowchart LR\n Query((Query)) --\x3e b(b
cost: 1)\n b --\x3e B((B))\n \n B ---\x3e ac("{a,c}"
batch: z
 cost: 2)\n ac --\x3e A((A))\n ac --\x3e C((C))\n\nstyle ac stroke:#f66,stroke-dasharray: 5 5'}),(0,s.kt)("h3",{id:"default-planner-intuition"},"Default planner intuition"),(0,s.kt)("p",null,"The default planner heuristic in gql lazily enumerates all plans, imposing a locally greedy order on the enumerated plans.\nThe default planner also employs some simple but powerful pruning rules to eliminate trivially uninteresting plan variations."),(0,s.kt)("p",null,'The planner works through the problem from the root(s) and down through the DAG.\nThe algorithm keeps some state regarding what batches have been visited and what nodes are scheduled in the "current plan".\nIn a round of planning the algorithm will figure out what nodes are schedulable by looking at its state.'),(0,s.kt)("p",null,"The planner will lazily generate all combinations of possible batches of schedulable nodes."),(0,s.kt)("admonition",{type:"note"},(0,s.kt)("p",{parentName:"admonition"},"One can easily cause a combinatorial explosion by generating combinations.\nFortunately we don't consider every plan (and in fact, the default algorithm only pulls ",(0,s.kt)("span",{parentName:"p",className:"math math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mi",{parentName:"mrow"},"O"),(0,s.kt)("mo",{parentName:"mrow",stretchy:"false"},"("),(0,s.kt)("mi",{parentName:"mrow",mathvariant:"normal"},"\u2223"),(0,s.kt)("mi",{parentName:"mrow"},"V"),(0,s.kt)("mi",{parentName:"mrow",mathvariant:"normal"},"\u2223"),(0,s.kt)("mo",{parentName:"mrow",stretchy:"false"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"O(|V|)")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1em",verticalAlign:"-0.25em"}}),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.02778em"}},"O"),(0,s.kt)("span",{parentName:"span",className:"mopen"},"("),(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2223"),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.22222em"}},"V"),(0,s.kt)("span",{parentName:"span",className:"mord"},"\u2223"),(0,s.kt)("span",{parentName:"span",className:"mclose"},")")))))," plans).\nFurthermore, most problems will have fewer than n plans.")),(0,s.kt)("p",null,'The planner will always generate the largest batches first, hence the "locally greedy" ordering.'),(0,s.kt)("p",null,"Trivially schedulable nodes are always scheduled first if possible; a pruning rule makes sure of this.\nFor a given schedulable node, if no other un-scheduled node exists of the same family (excluding its own descendants), then that node's only and optimal batch is the singleton batch containing only that node."),(0,s.kt)("p",null,"There are other pruning rules that have been considered, but don't seem necessary for practical problems since most problems produce very few plans."),(0,s.kt)("p",null,'One such pruning rule considers "optimal" generated batch combinations.\nIf the largest batch that the planner can generate ',(0,s.kt)("span",{parentName:"p",className:"math 
math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mfrac",{parentName:"mrow",linethickness:"0px"},(0,s.kt)("mi",{parentName:"mfrac"},"n"),(0,s.kt)("mi",{parentName:"mfrac"},"n")),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"n \\choose n")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.2em",verticalAlign:"-0.35em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},"(")),(0,s.kt)("span",{parentName:"span",className:"mfrac"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.7454em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-2.355em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n")))),(0,s.kt)("span",{parentName:"span",style:{top:"-3.144em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n"))))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.345em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},")"))))))),' contains nodes that all have the same "latest ending parent", then all other combinations ',(0,s.kt)("span",{parentName:"p",className:"math math-inline"},(0,s.kt)("span",{parentName:"span",className:"katex"},(0,s.kt)("span",{parentName:"span",className:"katex-mathml"},(0,s.kt)("math",{parentName:"span",xmlns:"http://www.w3.org/1998/Math/MathML"},(0,s.kt)("semantics",{parentName:"math"},(0,s.kt)("mrow",{parentName:"semantics"},(0,s.kt)("mrow",{parentName:"mrow"},(0,s.kt)("mo",{parentName:"mrow",fence:"true"},"("),(0,s.kt)("mfrac",{parentName:"mrow",linethickness:"0px"},(0,s.kt)("mi",{parentName:"mfrac"},"n"),(0,s.kt)("mi",{parentName:"mfrac"},"k")),(0,s.kt)("mo",{parentName:"mrow",fence:"true"},")")),(0,s.kt)("mtext",{parentName:"mrow"},"\xa0where\xa0"),(0,s.kt)("mi",{parentName:"mrow"},"k"),(0,s.kt)("mo",{parentName:"mrow"},"<"),(0,s.kt)("mi",{parentName:"mrow"},"n")),(0,s.kt)("annotation",{parentName:"semantics",encoding:"application/x-tex"},"{n \\choose k} \\text{ where } k < 
n")))),(0,s.kt)("span",{parentName:"span",className:"katex-html","aria-hidden":"true"},(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"1.2em",verticalAlign:"-0.35em"}}),(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mord"},(0,s.kt)("span",{parentName:"span",className:"mopen delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},"(")),(0,s.kt)("span",{parentName:"span",className:"mfrac"},(0,s.kt)("span",{parentName:"span",className:"vlist-t vlist-t2"},(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.7454em"}},(0,s.kt)("span",{parentName:"span",style:{top:"-2.355em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight",style:{marginRight:"0.03148em"}},"k")))),(0,s.kt)("span",{parentName:"span",style:{top:"-3.144em"}},(0,s.kt)("span",{parentName:"span",className:"pstrut",style:{height:"2.7em"}}),(0,s.kt)("span",{parentName:"span",className:"sizing reset-size6 size3 mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mtight"},(0,s.kt)("span",{parentName:"span",className:"mord mathnormal mtight"},"n"))))),(0,s.kt)("span",{parentName:"span",className:"vlist-s"},"\u200b")),(0,s.kt)("span",{parentName:"span",className:"vlist-r"},(0,s.kt)("span",{parentName:"span",className:"vlist",style:{height:"0.345em"}},(0,s.kt)("span",{parentName:"span"}))))),(0,s.kt)("span",{parentName:"span",className:"mclose delimcenter",style:{top:"0em"}},(0,s.kt)("span",{parentName:"span",className:"delimsizing size1"},")")))),(0,s.kt)("span",{parentName:"span",className:"mord text"},(0,s.kt)("span",{parentName:"span",className:"mord"},"\xa0where\xa0")),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal",style:{marginRight:"0.03148em"}},"k"),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}}),(0,s.kt)("span",{parentName:"span",className:"mrel"},"<"),(0,s.kt)("span",{parentName:"span",className:"mspace",style:{marginRight:"0.2778em"}})),(0,s.kt)("span",{parentName:"span",className:"base"},(0,s.kt)("span",{parentName:"span",className:"strut",style:{height:"0.4306em"}}),(0,s.kt)("span",{parentName:"span",className:"mord mathnormal"},"n")))))," are trivially fruitless."),(0,s.kt)("p",null,"Once the planner has constructed a lazy list of batches, it then consideres every plan that ",(0,s.kt)("em",{parentName:"p"},"could")," exist for every batch, hence a computational difficulty of finding the ",(0,s.kt)("strong",{parentName:"p"},"best")," plan."),(0,s.kt)("admonition",{type:"info"},(0,s.kt)("p",{parentName:"admonition"},"If you want to understand the algorithm better, consider taking a look at the source code.")),(0,s.kt)("h3",{id:"converting-a-query-to-a-problem"},"Converting a query to a problem"),(0,s.kt)("p",null,"gql considers only resolvers when running query planning.\nEvery field that is traversed in a query is expanded to all the resolvers it consists such that it becomes a digraph."),(0,s.kt)("p",null,"As an example, consider the following instance:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},'import gql._\nimport gql.dsl.all._\nimport 
gql.ast._\nimport gql.server.planner._\nimport gql.resolver._\nimport scala.concurrent.duration._\nimport cats.implicits._\nimport cats.effect._\nimport cats.effect.unsafe.implicits.global\n\ncase object Child\n\ndef wait[I](ms: Int) = Resolver.effect[IO, I](_ => IO.sleep(50.millis))\n\nval schem = Schema.stateful{\n Resolver.batch[IO, Unit, Int](_ => IO.sleep(10.millis) as Map(() -> 42)).flatMap{ b1 =>\n Resolver.batch[IO, Unit, String](_ => IO.sleep(15.millis) as Map(() -> "42")).map{ b2 =>\n implicit lazy val child: Type[IO, Child.type] = builder[IO, Child.type]{ b =>\n b.tpe(\n "Child",\n "b1" -> b.from(wait(50) andThen b1.opt map (_.get)),\n "b2" -> b.from(wait(100) andThen b2.opt map (_.get)),\n )\n }\n\n SchemaShape.unit[IO](\n builder[IO, Unit]{ b =>\n b.fields(\n "child" -> b.from(wait(42) as Child),\n "b2" -> b.from(wait(25) andThen b2.opt map (_.get))\n )\n }\n )\n }\n }\n}.unsafeRunSync()\n')),(0,s.kt)("p",null,"Now let's define our query and modify our schema so the planner logs:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},'val qry = """\n query {\n child {\n b1\n b2\n }\n b2\n }\n"""\n\nval withLoggedPlanner = schem.copy(planner = new Planner[IO] {\n def plan(naive: NodeTree): IO[OptimizedDAG] =\n schem.planner.plan(naive).map { output =>\n println(output.show(ansiColors = false))\n println(s"naive: ${output.totalCost}")\n println(s"optimized: ${output.optimizedCost}")\n output\n }\n})\n')),(0,s.kt)("p",null,"And we plan for it inspect the result:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"def runQry() = {\n Compiler[IO]\n .compile(withLoggedPlanner, qry)\n .traverse_{ case Application.Query(fa) => fa }\n .unsafeRunSync()\n}\n\nrunQry()\n// name: Query_child.compose-left.compose-right.compose-right, cost: 100.00, end: 100.00, batch: 5\n// name: Child_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 2\n// name: batch_1, cost: 100.00, end: 300.00, batch: 4\n// name: Child_b1.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 3\n// name: batch_0, cost: 100.00, end: 300.00, batch: 1\n// name: Query_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 100.00, batch: 0\n// name: batch_1, cost: 100.00, end: 200.00, batch: 4\n// >>>>>>>>>>>>>name: batch_1, cost: 100.00, end: 300.00, batch: 4\n// \n// naive: 700.0\n// optimized: 600.0\n")),(0,s.kt)("p",null,"We can warm up the weights (statistics) a bit by running the query a few times:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"(0 to 10).toList.foreach(_ => runQry())\n")),(0,s.kt)("p",null,"Now we can see how the weights are assigned:"),(0,s.kt)("pre",null,(0,s.kt)("code",{parentName:"pre",className:"language-scala"},"runQry()\n// name: Query_child.compose-left.compose-right.compose-right, cost: 50207.73, end: 50207.73, batch: 2\n// name: Child_b2.compose-left.compose-left.compose-right, cost: 50157.91, end: 100365.64, batch: 3\n// name: batch_1, cost: 15154.46, end: 115520.10, batch: 0\n// name: Child_b1.compose-left.compose-left.compose-right, cost: 50160.10, end: 100367.82, batch: 4\n// name: batch_0, cost: 10156.19, end: 110524.00, batch: 1\n// name: Query_b2.compose-left.compose-left.compose-right, cost: 50216.10, end: 50216.10, batch: 5\n// name: batch_1, cost: 15154.46, end: 65370.55, batch: 0\n// >>>>>>>>>>>>>>>>>name: batch_1, cost: 15154.46, end: 115520.10, batch: 0\n// \n// naive: 241206.9090909091\n// optimized: 
226052.45454545456\n")),(0,s.kt)("p",null,"Plans can also be shown nicely in a terminal with ANSI colors:\n",(0,s.kt)("img",{alt:"Terminal output",src:t(1745).Z,width:"1144",height:"333"})))}c.isMDXComponent=!0},1745:(e,a,t)=>{t.d(a,{Z:()=>n});const n=t.p+"assets/images/plan_image-bacfe186ade480842758a5d754111dfd.png"}}]); \ No newline at end of file diff --git a/assets/js/ffc79f40.9c6cb8e8.js b/assets/js/ffc79f40.fc52bab8.js similarity index 97% rename from assets/js/ffc79f40.9c6cb8e8.js rename to assets/js/ffc79f40.fc52bab8.js index 374fc308..1763e979 100644 --- a/assets/js/ffc79f40.9c6cb8e8.js +++ b/assets/js/ffc79f40.fc52bab8.js @@ -1 +1 @@ -"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[960],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>g});var r=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=r.createContext({}),c=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},u=function(e){var n=c(e.components);return r.createElement(s.Provider,{value:n},e.children)},p={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,l=e.mdxType,a=e.originalType,s=e.parentName,u=o(e,["components","mdxType","originalType","parentName"]),m=c(t),g=l,d=m["".concat(s,".").concat(g)]||m[g]||p[g]||a;return t?r.createElement(d,i(i({ref:n},u),{},{components:t})):r.createElement(d,i({ref:n},u))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var a=t.length,i=new Array(a);i[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:l,i[1]=o;for(var c=2;c{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>o,toc:()=>c});var r=t(7462),l=(t(7294),t(3905));const a={title:"Code generation"},i=void 0,o={unversionedId:"client/code-generation",id:"client/code-generation",title:"Code generation",description:"Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a lot of code for non-trivial queries.",source:"@site/docs/client/code-generation.md",sourceDirName:"client",slug:"/client/code-generation",permalink:"/gql/docs/client/code-generation",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/client/code-generation.md",tags:[],version:"current",frontMatter:{title:"Code generation"},sidebar:"docs",previous:{title:"Query DSL",permalink:"/gql/docs/client/dsl"},next:{title:"Http4s",permalink:"/gql/docs/client/integrations/http4s"}},s={},c=[{value:"Setting up",id:"setting-up",level:2},{value:"Sbt integration",id:"sbt-integration",level:3},{value:"Usage",id:"usage",level:2}],u={toc:c};function p(e){let{components:n,...t}=e;return(0,l.kt)("wrapper",(0,r.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a 
lot of code for non-trivial queries."),(0,l.kt)("p",null,"gql also features a code generator that transforms a graphql schema file and a set of queries (or fragments) into dsl code."),(0,l.kt)("h2",{id:"setting-up"},"Setting up"),(0,l.kt)("p",null,"The code generator comes as a stand-alone cli at the maven coordinates:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'// build.sbt\n"io.github.valdemargr" %% "gql-client-codegen-cli" % "0.3.5"\n')),(0,l.kt)("p",null,"The code generator can also be integrated into sbt for a smoother development experience:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'// project/plugins.sbt\naddSbtPlugin("io.github.valdemargr" % "gql-client-codegen-sbt" % "0.3.5")\n')),(0,l.kt)("h3",{id:"sbt-integration"},"Sbt integration"),(0,l.kt)("p",null,"By default the sbt integration will look for a schema file in the resources directory at ",(0,l.kt)("inlineCode",{parentName:"p"},".../resources/schema.graphql")," and queries in the resources directory at ",(0,l.kt)("inlineCode",{parentName:"p"},".../resources/queries"),"."),(0,l.kt)("p",null,"You can, however, override or add more sources at custom locations:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val myBuild = \n ...\n .settings(\n resourceGroups += Gql.resourceGroup(\n name="other_resources",\n schemaFile= file("path/to/schema.graphql"),\n file("path/to/query1.graphql"),\n file("path/to/query2.graphql")\n )\n )\n')),(0,l.kt)("h2",{id:"usage"},"Usage"),(0,l.kt)("p",null,"When the code-generator is invoked it will use the queries and fragments in combination with the schema to generate a set of scala files containing the equivalent query in scala code."),(0,l.kt)("p",null,"For this demonstration, the code generator will be invoked manually:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.client.codegen.{ GeneratorCli => Gen }\nimport fs2.io.file.Files\nimport cats.effect._\nimport cats.implicits._\nimport cats.effect.unsafe.implicits.global\n\ndef runQuery(queryDef: String) =\n Files[IO].tempDirectory.use{ tmp => \n val schemaFile = tmp / "schema.graphql"\n val queryFile = tmp / "query.graphql"\n val sharedOutFile = tmp / "shared.scala"\n val queryOutFile = tmp / "query.scala"\n\n val schemaDef = """\n enum HelloEnum {\n HELLO,\n WORLD\n }\n\n type A {\n a: String\n }\n\n type B {\n b: String\n }\n\n union HelloUnion = A | B\n\n type Query {\n helloEnum(name: String): HelloEnum,\n helloUnion(name2: String): HelloUnion\n }\n """\n\n val writeSchemaF = fs2.Stream(schemaDef)\n .through(fs2.text.utf8.encode)\n .through(Files[IO].writeAll(schemaFile))\n .compile\n .drain\n\n val writeQueryF = fs2.Stream(queryDef)\n .through(fs2.text.utf8.encode)\n .through(Files[IO].writeAll(queryFile))\n .compile\n .drain\n\n import io.circe._\n import io.circe.syntax._\n val jo = Json.obj(\n "schema" -> Json.fromString(schemaFile.toString),\n "shared" -> Json.fromString(sharedOutFile.toString),\n "queries" -> Json.arr(\n Json.obj(\n "query" -> Json.fromString(queryFile.toString),\n "output" -> Json.fromString(queryOutFile.toString)\n )\n )\n )\n\n writeSchemaF >>\n writeQueryF >>\n Gen.run(List("--validate", "--input",jo.spaces2)) >>\n Files[IO].readAll(queryOutFile)\n .through(fs2.text.utf8.decode)\n .compile\n .string\n .map(println)\n }.unsafeRunSync()\n\nrunQuery(\n """\n fragment HelloFragment on Query {\n helloEnum(name: $name)\n }\n\n query HelloQuery($name: 
String) {\n ...HelloFragment\n helloUnion(name2: "hey") {\n ... on A {\n a\n }\n ... on B {\n b\n }\n }\n }\n """\n)\n// package gql.client.generated\n// \n// import _root_.gql.client._\n// import _root_.gql.client.dsl._\n// import _root_.gql.parser.{Value => V, AnyValue, Const}\n// import cats.implicits._\n// \n// final case class HelloFragment(\n// helloEnum: Option[HelloEnum]\n// )\n// \n// object HelloFragment {\n// implicit val selectionSet: SelectionSet[HelloFragment] = (\n// sel.build[Option[HelloEnum]]("helloEnum", x => x.args(arg("name", V.VariableValue("name"))))\n// ).map(apply)\n// \n// implicit val fragdef: Fragment[HelloFragment] = fragment[HelloFragment]("HelloFragment", "Query")\n// }\n// \n// final case class HelloQuery(\n// helloFragment: gql.client.generated.HelloFragment,\n// helloUnion: Option[HelloQuery.HelloUnion]\n// )\n// \n// object HelloQuery {\n// final case class HelloUnion(\n// a: Option[HelloUnion.InlineA],\n// b: Option[HelloUnion.InlineB]\n// ) {\n// lazy val variant: Option[HelloUnion.Variant] = \n// (a).map(HelloUnion.Variant.OnA.apply) orElse\n// (b).map(HelloUnion.Variant.OnB.apply)\n// }\n// \n// object HelloUnion {\n// sealed trait Variant extends Product with Serializable\n// object Variant {\n// final case class OnA(\n// a: HelloUnion.InlineA\n// ) extends Variant\n// \n// final case class OnB(\n// b: HelloUnion.InlineB\n// ) extends Variant\n// }\n// \n// final case class InlineA(\n// a: Option[String]\n// )\n// \n// object InlineA {\n// implicit val selectionSet: SelectionSet[InlineA] = (\n// sel.build[Option[String]]("a", x => x)\n// ).map(apply)\n// }\n// \n// final case class InlineB(\n// b: Option[String]\n// )\n// \n// object InlineB {\n// implicit val selectionSet: SelectionSet[InlineB] = (\n// sel.build[Option[String]]("b", x => x)\n// ).map(apply)\n// }\n// \n// implicit val selectionSet: SelectionSet[HelloUnion] = (\n// inlineFrag.build[HelloUnion.InlineA]("A", x => x),\n// inlineFrag.build[HelloUnion.InlineB]("B", x => x)\n// ).mapN(apply)\n// }\n// \n// implicit val selectionSet: SelectionSet[HelloQuery] = (\n// fragment.spread.build[gql.client.generated.HelloFragment](x => x).requiredFragment("HelloFragment", "Query"),\n// sel.build[Option[HelloQuery.HelloUnion]]("helloUnion", x => x.args(arg("name2", V.StringValue("hey"))))\n// ).mapN(apply)\n// \n// final case class Variables(\n// name: Option[Option[String]] = None\n// ) {\n// def setName(value: Option[String]): Variables = copy(name = Some(value))\n// }\n// \n// val queryExpr = (\n// omittableVariable[Option[String]]("name")\n// ).introduce{ _ =>\n// selectionSet\n// }\n// \n// val query = _root_.gql.client.Query.parameterized(_root_.gql.parser.QueryAst.OperationType.Query, "HelloQuery", queryExpr)\n// }\n')),(0,l.kt)("p",null,"When supplying the ",(0,l.kt)("inlineCode",{parentName:"p"},"--validate")," flag, gql will generate a stub implementation of the schema and run the same code as if running a gql server."),(0,l.kt)("p",null,"Lets construct a helper to show this:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'import scala.util.{Try,Failure}\n// We will also remove the ansii color codes from the output, since they don\'t render well in the docs\ndef runFail(q: String) = \n Try {\n runQuery(q)\n } match {\n case Failure(ex) => println(ex.getMessage().replaceAll("\\u001B\\\\[[;\\\\d]*m", ""))\n }\n')),(0,l.kt)("p",null,"Now with a parsing error:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'runFail(\n 
"""\n query MyQuery {\n test.,test\n }\n """\n)\n// Failed to generate code with error: failed at offset 41 on line 2 with code 46\n// char in range } to } (code 125 to 125)\n// for document:\n// | \n// | query MyQuery {\n// | test.,test\n// | >>>>>>>>>>>>>^^^^^^^ line:2, column:16, offset:41, character code code:46\n// | }\n// |\n')),(0,l.kt)("p",null,"And also with a query validation error:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'runFail(\n """\n query MyQuery {\n helloEnum(name: 1)\n }\n """\n)\n// Failed to generate code with error: decoding failure for type `String` with message Got value \'1\' with wrong type, expecting string at root.helloEnum.name.String\n// in file /tmp/12148183625915676551/query.graphql\n// | \n// | query MyQuery {\n// | helloEnum(name: 1)\n// | >>>>>>>>>>>>>>>>>>>>>>>>>^^^^^^^ line:2, column:28, offset:53, character code code:49\n// | }\n// |\n')))}p.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkwebsite=self.webpackChunkwebsite||[]).push([[960],{3905:(e,n,t)=>{t.d(n,{Zo:()=>u,kt:()=>g});var r=t(7294);function l(e,n,t){return n in e?Object.defineProperty(e,n,{value:t,enumerable:!0,configurable:!0,writable:!0}):e[n]=t,e}function a(e,n){var t=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);n&&(r=r.filter((function(n){return Object.getOwnPropertyDescriptor(e,n).enumerable}))),t.push.apply(t,r)}return t}function i(e){for(var n=1;n=0||(l[t]=e[t]);return l}(e,n);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0||Object.prototype.propertyIsEnumerable.call(e,t)&&(l[t]=e[t])}return l}var s=r.createContext({}),c=function(e){var n=r.useContext(s),t=n;return e&&(t="function"==typeof e?e(n):i(i({},n),e)),t},u=function(e){var n=c(e.components);return r.createElement(s.Provider,{value:n},e.children)},p={inlineCode:"code",wrapper:function(e){var n=e.children;return r.createElement(r.Fragment,{},n)}},m=r.forwardRef((function(e,n){var t=e.components,l=e.mdxType,a=e.originalType,s=e.parentName,u=o(e,["components","mdxType","originalType","parentName"]),m=c(t),g=l,d=m["".concat(s,".").concat(g)]||m[g]||p[g]||a;return t?r.createElement(d,i(i({ref:n},u),{},{components:t})):r.createElement(d,i({ref:n},u))}));function g(e,n){var t=arguments,l=n&&n.mdxType;if("string"==typeof e||l){var a=t.length,i=new Array(a);i[0]=m;var o={};for(var s in n)hasOwnProperty.call(n,s)&&(o[s]=n[s]);o.originalType=e,o.mdxType="string"==typeof e?e:l,i[1]=o;for(var c=2;c{t.r(n),t.d(n,{assets:()=>s,contentTitle:()=>i,default:()=>p,frontMatter:()=>a,metadata:()=>o,toc:()=>c});var r=t(7462),l=(t(7294),t(3905));const a={title:"Code generation"},i=void 0,o={unversionedId:"client/code-generation",id:"client/code-generation",title:"Code generation",description:"Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a lot of code for non-trivial queries.",source:"@site/docs/client/code-generation.md",sourceDirName:"client",slug:"/client/code-generation",permalink:"/gql/docs/client/code-generation",draft:!1,editUrl:"https://github.com/valdemargr/gql/tree/main/docs/client/code-generation.md",tags:[],version:"current",frontMatter:{title:"Code generation"},sidebar:"docs",previous:{title:"Query DSL",permalink:"/gql/docs/client/dsl"},next:{title:"Http4s",permalink:"/gql/docs/client/integrations/http4s"}},s={},c=[{value:"Setting up",id:"setting-up",level:2},{value:"Sbt 
integration",id:"sbt-integration",level:3},{value:"Usage",id:"usage",level:2}],u={toc:c};function p(e){let{components:n,...t}=e;return(0,l.kt)("wrapper",(0,r.Z)({},u,t,{components:n,mdxType:"MDXLayout"}),(0,l.kt)("p",null,"Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a lot of code for non-trivial queries."),(0,l.kt)("p",null,"gql also features a code generator that transforms a graphql schema file and a set of queries (or fragments) into dsl code."),(0,l.kt)("h2",{id:"setting-up"},"Setting up"),(0,l.kt)("p",null,"The code generator comes as a stand-alone cli at the maven coordinates:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'// build.sbt\n"io.github.valdemargr" %% "gql-client-codegen-cli" % "0.3.5"\n')),(0,l.kt)("p",null,"The code generator can also be integrated into sbt for a smoother development experience:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'// project/plugins.sbt\naddSbtPlugin("io.github.valdemargr" % "gql-client-codegen-sbt" % "0.3.5")\n')),(0,l.kt)("h3",{id:"sbt-integration"},"Sbt integration"),(0,l.kt)("p",null,"By default the sbt integration will look for a schema file in the resources directory at ",(0,l.kt)("inlineCode",{parentName:"p"},".../resources/schema.graphql")," and queries in the resources directory at ",(0,l.kt)("inlineCode",{parentName:"p"},".../resources/queries"),"."),(0,l.kt)("p",null,"You can, however, override or add more sources at custom locations:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'lazy val myBuild = \n ...\n .settings(\n resourceGroups += Gql.resourceGroup(\n name="other_resources",\n schemaFile= file("path/to/schema.graphql"),\n file("path/to/query1.graphql"),\n file("path/to/query2.graphql")\n )\n )\n')),(0,l.kt)("h2",{id:"usage"},"Usage"),(0,l.kt)("p",null,"When the code-generator is invoked it will use the queries and fragments in combination with the schema to generate a set of scala files containing the equivalent query in scala code."),(0,l.kt)("p",null,"For this demonstration, the code generator will be invoked manually:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'import gql.client.codegen.{ GeneratorCli => Gen }\nimport fs2.io.file.Files\nimport cats.effect._\nimport cats.implicits._\nimport cats.effect.unsafe.implicits.global\n\ndef runQuery(queryDef: String) =\n Files[IO].tempDirectory.use{ tmp => \n val schemaFile = tmp / "schema.graphql"\n val queryFile = tmp / "query.graphql"\n val sharedOutFile = tmp / "shared.scala"\n val queryOutFile = tmp / "query.scala"\n\n val schemaDef = """\n enum HelloEnum {\n HELLO,\n WORLD\n }\n\n type A {\n a: String\n }\n\n type B {\n b: String\n }\n\n union HelloUnion = A | B\n\n type Query {\n helloEnum(name: String): HelloEnum,\n helloUnion(name2: String): HelloUnion\n }\n """\n\n val writeSchemaF = fs2.Stream(schemaDef)\n .through(fs2.text.utf8.encode)\n .through(Files[IO].writeAll(schemaFile))\n .compile\n .drain\n\n val writeQueryF = fs2.Stream(queryDef)\n .through(fs2.text.utf8.encode)\n .through(Files[IO].writeAll(queryFile))\n .compile\n .drain\n\n import io.circe._\n import io.circe.syntax._\n val jo = Json.obj(\n "schema" -> Json.fromString(schemaFile.toString),\n "shared" -> Json.fromString(sharedOutFile.toString),\n "queries" -> Json.arr(\n Json.obj(\n "query" -> Json.fromString(queryFile.toString),\n "output" -> 
Json.fromString(queryOutFile.toString)\n )\n )\n )\n\n writeSchemaF >>\n writeQueryF >>\n Gen.run(List("--validate", "--input",jo.spaces2)) >>\n Files[IO].readAll(queryOutFile)\n .through(fs2.text.utf8.decode)\n .compile\n .string\n .map(println)\n }.unsafeRunSync()\n\nrunQuery(\n """\n fragment HelloFragment on Query {\n helloEnum(name: $name)\n }\n\n query HelloQuery($name: String) {\n ...HelloFragment\n helloUnion(name2: "hey") {\n ... on A {\n a\n }\n ... on B {\n b\n }\n }\n }\n """\n)\n// package gql.client.generated\n// \n// import _root_.gql.client._\n// import _root_.gql.client.dsl._\n// import _root_.gql.parser.{Value => V, AnyValue, Const}\n// import cats.implicits._\n// \n// final case class HelloFragment(\n// helloEnum: Option[HelloEnum]\n// )\n// \n// object HelloFragment {\n// implicit val selectionSet: SelectionSet[HelloFragment] = (\n// sel.build[Option[HelloEnum]]("helloEnum", x => x.args(arg("name", V.VariableValue("name"))))\n// ).map(apply)\n// \n// implicit val fragdef: Fragment[HelloFragment] = fragment[HelloFragment]("HelloFragment", "Query")\n// }\n// \n// final case class HelloQuery(\n// helloFragment: gql.client.generated.HelloFragment,\n// helloUnion: Option[HelloQuery.HelloUnion]\n// )\n// \n// object HelloQuery {\n// final case class HelloUnion(\n// a: Option[HelloUnion.InlineA],\n// b: Option[HelloUnion.InlineB]\n// ) {\n// lazy val variant: Option[HelloUnion.Variant] = \n// (a).map(HelloUnion.Variant.OnA.apply) orElse\n// (b).map(HelloUnion.Variant.OnB.apply)\n// }\n// \n// object HelloUnion {\n// sealed trait Variant extends Product with Serializable\n// object Variant {\n// final case class OnA(\n// a: HelloUnion.InlineA\n// ) extends Variant\n// \n// final case class OnB(\n// b: HelloUnion.InlineB\n// ) extends Variant\n// }\n// \n// final case class InlineA(\n// a: Option[String]\n// )\n// \n// object InlineA {\n// implicit val selectionSet: SelectionSet[InlineA] = (\n// sel.build[Option[String]]("a", x => x)\n// ).map(apply)\n// }\n// \n// final case class InlineB(\n// b: Option[String]\n// )\n// \n// object InlineB {\n// implicit val selectionSet: SelectionSet[InlineB] = (\n// sel.build[Option[String]]("b", x => x)\n// ).map(apply)\n// }\n// \n// implicit val selectionSet: SelectionSet[HelloUnion] = (\n// inlineFrag.build[HelloUnion.InlineA]("A", x => x),\n// inlineFrag.build[HelloUnion.InlineB]("B", x => x)\n// ).mapN(apply)\n// }\n// \n// implicit val selectionSet: SelectionSet[HelloQuery] = (\n// fragment.spread.build[gql.client.generated.HelloFragment](x => x).requiredFragment("HelloFragment", "Query"),\n// sel.build[Option[HelloQuery.HelloUnion]]("helloUnion", x => x.args(arg("name2", V.StringValue("hey"))))\n// ).mapN(apply)\n// \n// final case class Variables(\n// name: Option[Option[String]] = None\n// ) {\n// def setName(value: Option[String]): Variables = copy(name = Some(value))\n// }\n// \n// val queryExpr = (\n// omittableVariable[Option[String]]("name")\n// ).introduce{ _ =>\n// selectionSet\n// }\n// \n// val query = _root_.gql.client.Query.parameterized(_root_.gql.parser.QueryAst.OperationType.Query, "HelloQuery", queryExpr)\n// }\n')),(0,l.kt)("p",null,"When supplying the ",(0,l.kt)("inlineCode",{parentName:"p"},"--validate")," flag, gql will generate a stub implementation of the schema and run the same code as if running a gql server."),(0,l.kt)("p",null,"Lets construct a helper to show this:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'import scala.util.{Try,Failure}\n// We will also remove the 
ansii color codes from the output, since they don\'t render well in the docs\ndef runFail(q: String) = \n Try {\n runQuery(q)\n } match {\n case Failure(ex) => println(ex.getMessage().replaceAll("\\u001B\\\\[[;\\\\d]*m", ""))\n }\n')),(0,l.kt)("p",null,"Now with a parsing error:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'runFail(\n """\n query MyQuery {\n test.,test\n }\n """\n)\n// Failed to generate code with error: failed at offset 41 on line 2 with code 46\n// char in range } to } (code 125 to 125)\n// for document:\n// | \n// | query MyQuery {\n// | test.,test\n// | >>>>>>>>>>>>>^^^^^^^ line:2, column:16, offset:41, character code code:46\n// | }\n// |\n')),(0,l.kt)("p",null,"And also with a query validation error:"),(0,l.kt)("pre",null,(0,l.kt)("code",{parentName:"pre",className:"language-scala"},'runFail(\n """\n query MyQuery {\n helloEnum(name: 1)\n }\n """\n)\n// Failed to generate code with error: decoding failure for type `String` with message Got value \'1\' with wrong type, expecting string at root.helloEnum.name.String\n// in file /tmp/9938872561877259213/query.graphql\n// | \n// | query MyQuery {\n// | helloEnum(name: 1)\n// | >>>>>>>>>>>>>>>>>>>>>>>>>^^^^^^^ line:2, column:28, offset:53, character code code:49\n// | }\n// |\n')))}p.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.6261d4fd.js b/assets/js/runtime~main.6261d4fd.js new file mode 100644 index 00000000..04edc06b --- /dev/null +++ b/assets/js/runtime~main.6261d4fd.js @@ -0,0 +1 @@ +(()=>{"use strict";var e,a,t,r,f,d={},o={};function n(e){var a=o[e];if(void 0!==a)return a.exports;var t=o[e]={id:e,loaded:!1,exports:{}};return d[e].call(t.exports,t,t.exports,n),t.loaded=!0,t.exports}n.m=d,e=[],n.O=(a,t,r,f)=>{if(!t){var d=1/0;for(i=0;i=f)&&Object.keys(n.O).every((e=>n.O[e](t[c])))?t.splice(c--,1):(o=!1,f0&&e[i-1][2]>f;i--)e[i]=e[i-1];e[i]=[t,r,f]},n.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return n.d(a,{a:a}),a},t=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,n.t=function(e,r){if(1&r&&(e=this(e)),8&r)return e;if("object"==typeof e&&e){if(4&r&&e.__esModule)return e;if(16&r&&"function"==typeof e.then)return e}var f=Object.create(null);n.r(f);var d={};a=a||[null,t({}),t([]),t(t)];for(var o=2&r&&e;"object"==typeof o&&!~a.indexOf(o);o=t(o))Object.getOwnPropertyNames(o).forEach((a=>d[a]=()=>e[a]));return d.default=()=>e,n.d(f,d),f},n.d=(e,a)=>{for(var t in 
a)n.o(a,t)&&!n.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:a[t]})},n.f={},n.e=e=>Promise.all(Object.keys(n.f).reduce(((a,t)=>(n.f[t](e,a),a)),[])),n.u=e=>"assets/js/"+({53:"935f2afb",80:"7c053662",85:"1f391b9e",143:"ceb10064",184:"a72b1aff",227:"018b0880",237:"1df93b7f",268:"09d7b412",287:"53a3a604",338:"2e533e94",357:"f790aebb",372:"1db64337",381:"4f169309",413:"208ff3a3",414:"393be207",436:"0a44bcdb",483:"d81c13dc",508:"62af8b26",514:"1be78505",554:"60875e34",562:"61de56f5",633:"92cae478",645:"de3af6ca",672:"677daa8b",776:"8588ea58",782:"77f00812",899:"98b0d92d",918:"17896441",931:"cd780aef",947:"9bf1d1b7",960:"ffc79f40",970:"078b5d8a"}[e]||e)+"."+{53:"36a83ebe",71:"9a7b255a",80:"16f42e61",85:"59e3fbd3",143:"65987bd1",184:"71796f53",209:"9b190bc7",218:"cdefc61f",227:"ef45eea6",237:"82247894",268:"62db6796",287:"5a56c39e",338:"1cf1df05",357:"e3936d05",366:"2975a453",372:"6f89d1ed",381:"9c6adec9",413:"053f3814",414:"2ff4e720",436:"60a58c3b",483:"cd670fa6",508:"1bedbcd2",514:"ebbe3f23",554:"d98e0bd7",562:"8d829215",633:"5c0b8d4e",645:"1ea8b125",672:"0ae8387b",776:"3b3182e4",782:"39bd5b49",814:"c95b62ab",899:"1f6baab1",918:"a22b9c09",931:"665464b4",947:"3753c4dd",960:"fc52bab8",970:"e0156272",972:"461bb297"}[e]+".js",n.miniCssF=e=>{},n.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),n.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),r={},f="website:",n.l=(e,a,t,d)=>{if(r[e])r[e].push(a);else{var o,c;if(void 0!==t)for(var b=document.getElementsByTagName("script"),i=0;i{o.onerror=o.onload=null,clearTimeout(s);var f=r[e];if(delete r[e],o.parentNode&&o.parentNode.removeChild(o),f&&f.forEach((e=>e(t))),a)return a(t)},s=setTimeout(u.bind(null,void 0,{type:"timeout",target:o}),12e4);o.onerror=u.bind(null,o.onerror),o.onload=u.bind(null,o.onload),c&&document.head.appendChild(o)}},n.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.nmd=e=>(e.paths=[],e.children||(e.children=[]),e),n.p="/gql/",n.gca=function(e){return e={17896441:"918","935f2afb":"53","7c053662":"80","1f391b9e":"85",ceb10064:"143",a72b1aff:"184","018b0880":"227","1df93b7f":"237","09d7b412":"268","53a3a604":"287","2e533e94":"338",f790aebb:"357","1db64337":"372","4f169309":"381","208ff3a3":"413","393be207":"414","0a44bcdb":"436",d81c13dc:"483","62af8b26":"508","1be78505":"514","60875e34":"554","61de56f5":"562","92cae478":"633",de3af6ca:"645","677daa8b":"672","8588ea58":"776","77f00812":"782","98b0d92d":"899",cd780aef:"931","9bf1d1b7":"947",ffc79f40:"960","078b5d8a":"970"}[e]||e,n.p+n.u(e)},(()=>{var e={303:0,532:0};n.f.j=(a,t)=>{var r=n.o(e,a)?e[a]:void 0;if(0!==r)if(r)t.push(r[2]);else if(/^(303|532)$/.test(a))e[a]=0;else{var f=new Promise(((t,f)=>r=e[a]=[t,f]));t.push(r[2]=f);var d=n.p+n.u(a),o=new Error;n.l(d,(t=>{if(n.o(e,a)&&(0!==(r=e[a])&&(e[a]=void 0),r)){var f=t&&("load"===t.type?"missing":t.type),d=t&&t.target&&t.target.src;o.message="Loading chunk "+a+" failed.\n("+f+": "+d+")",o.name="ChunkLoadError",o.type=f,o.request=d,r[1](o)}}),"chunk-"+a,a)}},n.O.j=a=>0===e[a];var a=(a,t)=>{var r,f,d=t[0],o=t[1],c=t[2],b=0;if(d.some((a=>0!==e[a]))){for(r in o)n.o(o,r)&&(n.m[r]=o[r]);if(c)var i=c(n)}for(a&&a(t);b{"use strict";var e,t,a,r,f,d={},o={};function n(e){var t=o[e];if(void 0!==t)return t.exports;var a=o[e]={id:e,loaded:!1,exports:{}};return 
d[e].call(a.exports,a,a.exports,n),a.loaded=!0,a.exports}n.m=d,e=[],n.O=(t,a,r,f)=>{if(!a){var d=1/0;for(i=0;i=f)&&Object.keys(n.O).every((e=>n.O[e](a[c])))?a.splice(c--,1):(o=!1,f0&&e[i-1][2]>f;i--)e[i]=e[i-1];e[i]=[a,r,f]},n.n=e=>{var t=e&&e.__esModule?()=>e.default:()=>e;return n.d(t,{a:t}),t},a=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,n.t=function(e,r){if(1&r&&(e=this(e)),8&r)return e;if("object"==typeof e&&e){if(4&r&&e.__esModule)return e;if(16&r&&"function"==typeof e.then)return e}var f=Object.create(null);n.r(f);var d={};t=t||[null,a({}),a([]),a(a)];for(var o=2&r&&e;"object"==typeof o&&!~t.indexOf(o);o=a(o))Object.getOwnPropertyNames(o).forEach((t=>d[t]=()=>e[t]));return d.default=()=>e,n.d(f,d),f},n.d=(e,t)=>{for(var a in t)n.o(t,a)&&!n.o(e,a)&&Object.defineProperty(e,a,{enumerable:!0,get:t[a]})},n.f={},n.e=e=>Promise.all(Object.keys(n.f).reduce(((t,a)=>(n.f[a](e,t),t)),[])),n.u=e=>"assets/js/"+({53:"935f2afb",80:"7c053662",85:"1f391b9e",143:"ceb10064",184:"a72b1aff",227:"018b0880",237:"1df93b7f",268:"09d7b412",287:"53a3a604",338:"2e533e94",357:"f790aebb",372:"1db64337",381:"4f169309",413:"208ff3a3",414:"393be207",436:"0a44bcdb",483:"d81c13dc",508:"62af8b26",514:"1be78505",554:"60875e34",562:"61de56f5",633:"92cae478",645:"de3af6ca",672:"677daa8b",776:"8588ea58",782:"77f00812",899:"98b0d92d",918:"17896441",931:"cd780aef",947:"9bf1d1b7",960:"ffc79f40",970:"078b5d8a"}[e]||e)+"."+{53:"36a83ebe",71:"9a7b255a",80:"16f42e61",85:"59e3fbd3",143:"fb80a129",184:"71796f53",209:"9b190bc7",218:"cdefc61f",227:"1f85cf71",237:"82247894",268:"62db6796",287:"5a56c39e",338:"1cf1df05",357:"e3936d05",366:"2975a453",372:"6f89d1ed",381:"ad8bcbf1",413:"053f3814",414:"2ff4e720",436:"8d243e93",483:"cd670fa6",508:"18142116",514:"ebbe3f23",554:"d98e0bd7",562:"8d829215",633:"9c63c4e7",645:"1ea8b125",672:"a97d5fbe",776:"e8321fab",782:"39bd5b49",814:"c95b62ab",899:"1f6baab1",918:"a22b9c09",931:"665464b4",947:"3753c4dd",960:"9c6cb8e8",970:"e0156272",972:"461bb297"}[e]+".js",n.miniCssF=e=>{},n.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),n.o=(e,t)=>Object.prototype.hasOwnProperty.call(e,t),r={},f="website:",n.l=(e,t,a,d)=>{if(r[e])r[e].push(t);else{var o,c;if(void 0!==a)for(var b=document.getElementsByTagName("script"),i=0;i{o.onerror=o.onload=null,clearTimeout(s);var f=r[e];if(delete r[e],o.parentNode&&o.parentNode.removeChild(o),f&&f.forEach((e=>e(a))),t)return t(a)},s=setTimeout(u.bind(null,void 0,{type:"timeout",target:o}),12e4);o.onerror=u.bind(null,o.onerror),o.onload=u.bind(null,o.onload),c&&document.head.appendChild(o)}},n.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},n.nmd=e=>(e.paths=[],e.children||(e.children=[]),e),n.p="/gql/",n.gca=function(e){return e={17896441:"918","935f2afb":"53","7c053662":"80","1f391b9e":"85",ceb10064:"143",a72b1aff:"184","018b0880":"227","1df93b7f":"237","09d7b412":"268","53a3a604":"287","2e533e94":"338",f790aebb:"357","1db64337":"372","4f169309":"381","208ff3a3":"413","393be207":"414","0a44bcdb":"436",d81c13dc:"483","62af8b26":"508","1be78505":"514","60875e34":"554","61de56f5":"562","92cae478":"633",de3af6ca:"645","677daa8b":"672","8588ea58":"776","77f00812":"782","98b0d92d":"899",cd780aef:"931","9bf1d1b7":"947",ffc79f40:"960","078b5d8a":"970"}[e]||e,n.p+n.u(e)},(()=>{var e={303:0,532:0};n.f.j=(t,a)=>{var 
r=n.o(e,t)?e[t]:void 0;if(0!==r)if(r)a.push(r[2]);else if(/^(303|532)$/.test(t))e[t]=0;else{var f=new Promise(((a,f)=>r=e[t]=[a,f]));a.push(r[2]=f);var d=n.p+n.u(t),o=new Error;n.l(d,(a=>{if(n.o(e,t)&&(0!==(r=e[t])&&(e[t]=void 0),r)){var f=a&&("load"===a.type?"missing":a.type),d=a&&a.target&&a.target.src;o.message="Loading chunk "+t+" failed.\n("+f+": "+d+")",o.name="ChunkLoadError",o.type=f,o.request=d,r[1](o)}}),"chunk-"+t,t)}},n.O.j=t=>0===e[t];var t=(t,a)=>{var r,f,d=a[0],o=a[1],c=a[2],b=0;if(d.some((t=>0!==e[t]))){for(r in o)n.o(o,r)&&(n.m[r]=o[r]);if(c)var i=c(n)}for(t&&t(a);b Code generation | gql - +
-

Code generation

Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a lot of code for non-trivial queries.

gql also features a code generator that transforms a graphql schema file and a set of queries (or fragments) into dsl code.

Setting up​

The code generator comes as a stand-alone CLI, available at the following Maven coordinates:

// build.sbt
"io.github.valdemargr" %% "gql-client-codegen-cli" % "0.3.5"

The code generator can also be integrated into sbt for a smoother development experience:

// project/plugins.sbt
addSbtPlugin("io.github.valdemargr" % "gql-client-codegen-sbt" % "0.3.5")

Sbt integration​

By default, the sbt integration looks for a schema file in the resources directory at .../resources/schema.graphql and for queries at .../resources/queries.

You can, however, override or add more sources at custom locations:

lazy val myBuild = 
...
.settings(
resourceGroups += Gql.resourceGroup(
name="other_resources",
schemaFile= file("path/to/schema.graphql"),
file("path/to/query1.graphql"),
file("path/to/query2.graphql")
)
)

Usage​

When the code generator is invoked, it uses the queries and fragments in combination with the schema to generate a set of scala files containing the equivalent query in scala code.

For this demonstration, the code generator will be invoked manually:

import gql.client.codegen.{ GeneratorCli => Gen }
import fs2.io.file.Files
import cats.effect._
import cats.implicits._
import cats.effect.unsafe.implicits.global

def runQuery(queryDef: String) =
Files[IO].tempDirectory.use{ tmp =>
val schemaFile = tmp / "schema.graphql"
val queryFile = tmp / "query.graphql"
val sharedOutFile = tmp / "shared.scala"
val queryOutFile = tmp / "query.scala"

val schemaDef = """
enum HelloEnum {
HELLO,
WORLD
}

type A {
a: String
}

type B {
b: String
}

union HelloUnion = A | B

type Query {
helloEnum(name: String): HelloEnum,
helloUnion(name2: String): HelloUnion
}
"""

val writeSchemaF = fs2.Stream(schemaDef)
.through(fs2.text.utf8.encode)
.through(Files[IO].writeAll(schemaFile))
.compile
.drain

val writeQueryF = fs2.Stream(queryDef)
.through(fs2.text.utf8.encode)
.through(Files[IO].writeAll(queryFile))
.compile
.drain

import io.circe._
import io.circe.syntax._
val jo = Json.obj(
"schema" -> Json.fromString(schemaFile.toString),
"shared" -> Json.fromString(sharedOutFile.toString),
"queries" -> Json.arr(
Json.obj(
"query" -> Json.fromString(queryFile.toString),
"output" -> Json.fromString(queryOutFile.toString)
)
)
)

writeSchemaF >>
writeQueryF >>
Gen.run(List("--validate", "--input",jo.spaces2)) >>
Files[IO].readAll(queryOutFile)
.through(fs2.text.utf8.decode)
.compile
.string
.map(println)
}.unsafeRunSync()

runQuery(
"""
fragment HelloFragment on Query {
helloEnum(name: $name)
}

query HelloQuery($name: String) {
...HelloFragment
helloUnion(name2: "hey") {
... on A {
a
}
... on B {
b
}
}
}
"""
)
// package gql.client.generated
//
// import _root_.gql.client._
// import _root_.gql.client.dsl._
// import _root_.gql.parser.{Value => V, AnyValue, Const}
// import cats.implicits._
//
// final case class HelloFragment(
// helloEnum: Option[HelloEnum]
// )
//
// object HelloFragment {
// implicit val selectionSet: SelectionSet[HelloFragment] = (
// sel.build[Option[HelloEnum]]("helloEnum", x => x.args(arg("name", V.VariableValue("name"))))
// ).map(apply)
//
// implicit val fragdef: Fragment[HelloFragment] = fragment[HelloFragment]("HelloFragment", "Query")
// }
//
// final case class HelloQuery(
// helloFragment: gql.client.generated.HelloFragment,
// helloUnion: Option[HelloQuery.HelloUnion]
// )
//
// object HelloQuery {
// final case class HelloUnion(
// a: Option[HelloUnion.InlineA],
// b: Option[HelloUnion.InlineB]
// ) {
// lazy val variant: Option[HelloUnion.Variant] =
// (a).map(HelloUnion.Variant.OnA.apply) orElse
// (b).map(HelloUnion.Variant.OnB.apply)
// }
//
// object HelloUnion {
// sealed trait Variant extends Product with Serializable
// object Variant {
// final case class OnA(
// a: HelloUnion.InlineA
// ) extends Variant
//
// final case class OnB(
// b: HelloUnion.InlineB
// ) extends Variant
// }
//
// final case class InlineA(
// a: Option[String]
// )
//
// object InlineA {
// implicit val selectionSet: SelectionSet[InlineA] = (
// sel.build[Option[String]]("a", x => x)
// ).map(apply)
// }
//
// final case class InlineB(
// b: Option[String]
// )
//
// object InlineB {
// implicit val selectionSet: SelectionSet[InlineB] = (
// sel.build[Option[String]]("b", x => x)
// ).map(apply)
// }
//
// implicit val selectionSet: SelectionSet[HelloUnion] = (
// inlineFrag.build[HelloUnion.InlineA]("A", x => x),
// inlineFrag.build[HelloUnion.InlineB]("B", x => x)
// ).mapN(apply)
// }
//
// implicit val selectionSet: SelectionSet[HelloQuery] = (
// fragment.spread.build[gql.client.generated.HelloFragment](x => x).requiredFragment("HelloFragment", "Query"),
// sel.build[Option[HelloQuery.HelloUnion]]("helloUnion", x => x.args(arg("name2", V.StringValue("hey"))))
// ).mapN(apply)
//
// final case class Variables(
// name: Option[Option[String]] = None
// ) {
// def setName(value: Option[String]): Variables = copy(name = Some(value))
// }
//
// val queryExpr = (
// omittableVariable[Option[String]]("name")
// ).introduce{ _ =>
// selectionSet
// }
//
// val query = _root_.gql.client.Query.parameterized(_root_.gql.parser.QueryAst.OperationType.Query, "HelloQuery", queryExpr)
// }

When supplying the --validate flag, gql will generate a stub implementation of the schema and run the same code as if running a gql server.

Let's construct a helper to show this:

import scala.util.{Try,Failure}
// We will also remove the ANSI color codes from the output, since they don't render well in the docs
def runFail(q: String) =
Try {
runQuery(q)
} match {
case Failure(ex) => println(ex.getMessage().replaceAll("\u001B\\[[;\\d]*m", ""))
}

Now with a parsing error:

runFail(
"""
query MyQuery {
test.,test
}
"""
)
// Failed to generate code with error: failed at offset 41 on line 2 with code 46
// char in range } to } (code 125 to 125)
// for document:
// |
// | query MyQuery {
// | test.,test
// | >>>>>>>>>>>>>^^^^^^^ line:2, column:16, offset:41, character code code:46
// | }
// |

And also with a query validation error:

runFail(
"""
query MyQuery {
helloEnum(name: 1)
}
"""
)
// Failed to generate code with error: decoding failure for type `String` with message Got value '1' with wrong type, expecting string at root.helloEnum.name.String
// in file /tmp/12148183625915676551/query.graphql
// |
// | query MyQuery {
// | helloEnum(name: 1)
// | >>>>>>>>>>>>>>>>>>>>>>>>>^^^^^^^ line:2, column:28, offset:53, character code code:49
// | }
// |
- +

Code generation

Writing queries in scala using the dsl is more concise and type-safe than writing out the types and codecs by hand, but still requires a lot of code for non-trivial queries.

gql also features a code generator that transforms a graphql schema file and a set of queries (or fragments) into dsl code.

Setting up​

The code generator comes as a stand-alone CLI, available at the following Maven coordinates:

// build.sbt
"io.github.valdemargr" %% "gql-client-codegen-cli" % "0.3.5"

The code generator can also be integrated into sbt for a smoother development experience:

// project/plugins.sbt
addSbtPlugin("io.github.valdemargr" % "gql-client-codegen-sbt" % "0.3.5")

Sbt integration​

By default, the sbt integration looks for a schema file in the resources directory at .../resources/schema.graphql and for queries at .../resources/queries.
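
For reference, this corresponds to a project layout roughly like the following, assuming a standard sbt project (a sketch; the query file names are made up):

src/main/resources/schema.graphql
src/main/resources/queries/query1.graphql
src/main/resources/queries/query2.graphql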

You can, however, override or add more sources at custom locations:

lazy val myBuild = 
...
.settings(
resourceGroups += Gql.resourceGroup(
name="other_resources",
schemaFile= file("path/to/schema.graphql"),
file("path/to/query1.graphql"),
file("path/to/query2.graphql")
)
)

Usage​

When the code generator is invoked, it uses the queries and fragments in combination with the schema to generate a set of scala files containing the equivalent query in scala code.

For this demonstration, the code generator will be invoked manually:

import gql.client.codegen.{ GeneratorCli => Gen }
import fs2.io.file.Files
import cats.effect._
import cats.implicits._
import cats.effect.unsafe.implicits.global

def runQuery(queryDef: String) =
Files[IO].tempDirectory.use{ tmp =>
val schemaFile = tmp / "schema.graphql"
val queryFile = tmp / "query.graphql"
val sharedOutFile = tmp / "shared.scala"
val queryOutFile = tmp / "query.scala"

val schemaDef = """
enum HelloEnum {
HELLO,
WORLD
}

type A {
a: String
}

type B {
b: String
}

union HelloUnion = A | B

type Query {
helloEnum(name: String): HelloEnum,
helloUnion(name2: String): HelloUnion
}
"""

val writeSchemaF = fs2.Stream(schemaDef)
.through(fs2.text.utf8.encode)
.through(Files[IO].writeAll(schemaFile))
.compile
.drain

val writeQueryF = fs2.Stream(queryDef)
.through(fs2.text.utf8.encode)
.through(Files[IO].writeAll(queryFile))
.compile
.drain

import io.circe._
import io.circe.syntax._
val jo = Json.obj(
"schema" -> Json.fromString(schemaFile.toString),
"shared" -> Json.fromString(sharedOutFile.toString),
"queries" -> Json.arr(
Json.obj(
"query" -> Json.fromString(queryFile.toString),
"output" -> Json.fromString(queryOutFile.toString)
)
)
)

writeSchemaF >>
writeQueryF >>
Gen.run(List("--validate", "--input",jo.spaces2)) >>
Files[IO].readAll(queryOutFile)
.through(fs2.text.utf8.decode)
.compile
.string
.map(println)
}.unsafeRunSync()

runQuery(
"""
fragment HelloFragment on Query {
helloEnum(name: $name)
}

query HelloQuery($name: String) {
...HelloFragment
helloUnion(name2: "hey") {
... on A {
a
}
... on B {
b
}
}
}
"""
)
// package gql.client.generated
//
// import _root_.gql.client._
// import _root_.gql.client.dsl._
// import _root_.gql.parser.{Value => V, AnyValue, Const}
// import cats.implicits._
//
// final case class HelloFragment(
// helloEnum: Option[HelloEnum]
// )
//
// object HelloFragment {
// implicit val selectionSet: SelectionSet[HelloFragment] = (
// sel.build[Option[HelloEnum]]("helloEnum", x => x.args(arg("name", V.VariableValue("name"))))
// ).map(apply)
//
// implicit val fragdef: Fragment[HelloFragment] = fragment[HelloFragment]("HelloFragment", "Query")
// }
//
// final case class HelloQuery(
// helloFragment: gql.client.generated.HelloFragment,
// helloUnion: Option[HelloQuery.HelloUnion]
// )
//
// object HelloQuery {
// final case class HelloUnion(
// a: Option[HelloUnion.InlineA],
// b: Option[HelloUnion.InlineB]
// ) {
// lazy val variant: Option[HelloUnion.Variant] =
// (a).map(HelloUnion.Variant.OnA.apply) orElse
// (b).map(HelloUnion.Variant.OnB.apply)
// }
//
// object HelloUnion {
// sealed trait Variant extends Product with Serializable
// object Variant {
// final case class OnA(
// a: HelloUnion.InlineA
// ) extends Variant
//
// final case class OnB(
// b: HelloUnion.InlineB
// ) extends Variant
// }
//
// final case class InlineA(
// a: Option[String]
// )
//
// object InlineA {
// implicit val selectionSet: SelectionSet[InlineA] = (
// sel.build[Option[String]]("a", x => x)
// ).map(apply)
// }
//
// final case class InlineB(
// b: Option[String]
// )
//
// object InlineB {
// implicit val selectionSet: SelectionSet[InlineB] = (
// sel.build[Option[String]]("b", x => x)
// ).map(apply)
// }
//
// implicit val selectionSet: SelectionSet[HelloUnion] = (
// inlineFrag.build[HelloUnion.InlineA]("A", x => x),
// inlineFrag.build[HelloUnion.InlineB]("B", x => x)
// ).mapN(apply)
// }
//
// implicit val selectionSet: SelectionSet[HelloQuery] = (
// fragment.spread.build[gql.client.generated.HelloFragment](x => x).requiredFragment("HelloFragment", "Query"),
// sel.build[Option[HelloQuery.HelloUnion]]("helloUnion", x => x.args(arg("name2", V.StringValue("hey"))))
// ).mapN(apply)
//
// final case class Variables(
// name: Option[Option[String]] = None
// ) {
// def setName(value: Option[String]): Variables = copy(name = Some(value))
// }
//
// val queryExpr = (
// omittableVariable[Option[String]]("name")
// ).introduce{ _ =>
// selectionSet
// }
//
// val query = _root_.gql.client.Query.parameterized(_root_.gql.parser.QueryAst.OperationType.Query, "HelloQuery", queryExpr)
// }

When supplying the --validate flag, gql will generate a stub implementation of the schema and run the same code as if running a gql server.

Let's construct a helper to show this:

import scala.util.{Try,Failure}
// We will also remove the ANSI color codes from the output, since they don't render well in the docs
def runFail(q: String) =
Try {
runQuery(q)
} match {
case Failure(ex) => println(ex.getMessage().replaceAll("\u001B\\[[;\\d]*m", ""))
}

Now with a parsing error:

runFail(
"""
query MyQuery {
test.,test
}
"""
)
// Failed to generate code with error: failed at offset 41 on line 2 with code 46
// char in range } to } (code 125 to 125)
// for document:
// |
// | query MyQuery {
// | test.,test
// | >>>>>>>>>>>>>^^^^^^^ line:2, column:16, offset:41, character code code:46
// | }
// |

And also with a query validation error:

runFail(
"""
query MyQuery {
helloEnum(name: 1)
}
"""
)
// Failed to generate code with error: decoding failure for type `String` with message Got value '1' with wrong type, expecting string at root.helloEnum.name.String
// in file /tmp/9938872561877259213/query.graphql
// |
// | query MyQuery {
// | helloEnum(name: 1)
// | >>>>>>>>>>>>>>>>>>>>>>>>>^^^^^^^ line:2, column:28, offset:53, character code code:49
// | }
// |
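
Finally, note that the runQuery helper always passes --validate. A minimal sketch of a non-validating invocation, assuming validation is opt-in via that flag (the arguments are otherwise unchanged from the call above):

// hypothetical: generate code from the same input JSON as above,
// but skip the stub-based validation step
Gen.run(List("--input", jo.spaces2))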
+ \ No newline at end of file diff --git a/docs/client/dsl/index.html b/docs/client/dsl/index.html index 75cec54e..e7757bd6 100644 --- a/docs/client/dsl/index.html +++ b/docs/client/dsl/index.html @@ -4,18 +4,18 @@ Query DSL | gql - +

Query DSL

gql provides a dsl for building graphql queries and response parsers. -When you compose your query with the dsl, you automatically compose both a query and a json decoder for the query response.

Selections​

The simplest combinator is sel which declares a field selection:

import gql.client._
import gql.client.dsl._
import cats.implicits._

sel[Option[String]]("name")
// res0: SelectionSet[Option[String]] = SelectionSet(
// impl = Fmap(
// fa = Lift(
// fa = Field(
// fieldName = "name",
// alias0 = None,
// args0 = List(),
// subQuery = OptionModifier(
// subQuery = Terminal(decoder = io.circe.Decoder$$anon$26@70a69b28)
// ),
// directives0 = List()
// )
// ),
// f = gql.client.SelectionSet$$$Lambda$12521/0x0000000803257040@62b9abd6
// )
// )

Most combinators in the dsl have multiple overloads that provide extra features, such as aliases and arguments:

sel.build[Option[String]]("name", _.alias("n"))

sel.build[Option[String]]("name", _.args(arg("id", 42)))

Every selection-related structure forms an Applicative, so you can compose multiple selections together:

val s1 = sel[Option[String]]("name")

val s2 = sel[Option[Int]]("age")

val s3: SelectionSet[(Option[String], Option[Int])] = (s1, s2).tupled

final case class PersonQuery(name: Option[String], age: Option[Int])

val pq: SelectionSet[PersonQuery] = (s1, s2).mapN(PersonQuery.apply)

Queries can also act as sub-selections (SubQuery in gql):

sel[PersonQuery]("person") {
pq
}

In the first examples the sub-query is captured implicitly. +When you compose your query with the dsl, you automatically compose both a query and a json decoder for the query response.

Selections​

The simplest combinator is sel which declares a field selection:

import gql.client._
import gql.client.dsl._
import cats.implicits._

sel[Option[String]]("name")
// res0: SelectionSet[Option[String]] = SelectionSet(
// impl = Fmap(
// fa = Lift(
// fa = Field(
// fieldName = "name",
// alias0 = None,
// args0 = List(),
// subQuery = OptionModifier(
// subQuery = Terminal(decoder = io.circe.Decoder$$anon$26@1566a132)
// ),
// directives0 = List()
// )
// ),
// f = gql.client.SelectionSet$$$Lambda$12464/0x00000008033f6040@32bbd17f
// )
// )

Most combinators in the dsl have multiple overloads to provide various features.

sel.build[Option[String]]("name", _.alias("n"))

sel.build[Option[String]]("name", _.args(arg("id", 42)))

Every selection related structure forms an Applicative such that you can compose multiple selections together:

val s1 = sel[Option[String]]("name")

val s2 = sel[Option[Int]]("age")

val s3: SelectionSet[(Option[String], Option[Int])] = (s1, s2).tupled

final case class PersonQuery(name: Option[String], age: Option[Int])

val pq: SelectionSet[PersonQuery] = (s1, s2).mapN(PersonQuery.apply)

Queries can also act as sub-selections (SubQuery in gql):

sel[PersonQuery]("person") {
pq
}

In the first examples the sub-query is captured implicitly. We can also do this for custom types:

implicit val pq2: SelectionSet[PersonQuery] = pq

sel[PersonQuery]("person")
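
Any field whose type has an implicit SelectionSet in scope can now be selected by name alone. For instance (bestFriend here is a hypothetical field of the same person shape):

sel[PersonQuery]("bestFriend")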

Fragments​

Like in graphql we can define fragments to reuse selections:

val frag = fragment[String]("MyFragment", on="Person") {
sel[String]("name")
}

val fragmentSpreads = sel[(Option[String], Option[Int])]("person") {
(
fragment.spread(frag),
inlineFrag[Int]("Person") {
sel[Int]("age")
}
).tupled
}

Notice that both fragment and inlineFrag return an optional result. This is because the spread may not match on the type (if the spread condition is a sub-type of the spread-on type).
This is not always the desired behavior, and as such, fragments can be required:

fragment.spread(frag).required: SelectionSet[String]

You can provide additional information, should the fragment turn out to actually be missing:

fragment.spread(frag).requiredFragment("MyFragment", on="Person")
info

Fragments should be preferred over re-using selections directly to reduce the rendered query size.

Variables​

Variables are accumulated into a sort of writer monad, such that they can be declared ad-hoc:

variable[String]("name")
// res7: Var[String, VariableName[String]] = Var(
// impl = WriterT(
// run = (
// Singleton(
// a = One(
// name = VariableName(name = "name"),
// tpe = "String!",
// default = None
// )
// ),
// io.circe.Encoder$AsObject$$anon$68@6e174085
// )
// ),
// variableNames = VariableName(name = "name")
// )

Variables can be combined with the ~ operator:

variable[String]("name") ~ variable[Int]("age")
// res8: Var[(String, Int), (VariableName[String], VariableName[Int])] = Var(
// impl = WriterT(
// run = (
// Append(
// leftNE = Singleton(
// a = One(
// name = VariableName(name = "name"),
// tpe = "String!",
// default = None
// )
// ),
// rightNE = Singleton(
// a = One(
// name = VariableName(name = "age"),
// tpe = "Int!",
// default = None
// )
// )
// ),
// io.circe.Encoder$AsObject$$anon$68@7873a582
// )
// ),
// variableNames = (VariableName(name = "name"), VariableName(name = "age"))
// )

Variables can also be declared as omittable, optionally with a default value:

omittableVariable[String]("name", value("John")) ~
omittableVariable[Int]("age")
// res9: Var[(Option[String], Option[Int]), (VariableName[String], VariableName[Int])] = Var(
// impl = WriterT(
// run = (
// Append(
// leftNE = Singleton(
// a = One(
// name = VariableName(name = "name"),
// tpe = "String!",
// default = Some(value = StringValue(v = "John", c = ()))
// )
// ),
// rightNE = Singleton(
// a = One(
// name = VariableName(name = "age"),
// tpe = "Int!",
// default = None
// )
// )
// ),
// io.circe.Encoder$AsObject$$anon$68@741e4d19
// )
// ),
// variableNames = (VariableName(name = "name"), VariableName(name = "age"))
// )

Variables can be "materialized" into a VariableClosure by introducing them to a query:

// Given a variable of type String, we can construct a query that returns an Int
val queryWithVariable: VariableClosure[String, Int] =
variable[String]("name").introduce{ name: VariableName[String] =>
sel.build[Int]("id", _.args(arg("name", name)))
}

VariableClosure can be combined via ~ and have their selections modified via modify:

def subQuery1: VariableClosure[String, Int] = queryWithVariable

def subQuery2: VariableClosure[String, Int] =
variable[String]("name2").introduce{ name: VariableName[String] =>
sel.build[Int]("id2", _.args(arg("name", name)))
}

def combined: VariableClosure[(String, String), Int] =
(subQuery1 ~ subQuery2).modify(_.map{ case (v1, v2) => v1 + v2 })

// VariableClosure also forms a profunctor so we can also use rmap
(subQuery1 ~ subQuery2).rmap{ case (v1, v2) => v1 + v2 }

Execution​

Once a query has been constructed, there are three ways to wrap it up: simple if the query is parameter-less and name-less, named if your query is named, and parameterized if it is both named and parameterized:

import gql.parser.QueryAst.OperationType
def simpleQuery = Query.simple(
OperationType.Query,
sel[Unit]("person") {
(
sel[Int]("id"),
sel.build[Int]("age", _.args(arg("numbers", List(42))))
).tupled.void
}
)

simpleQuery.compile.query
// res11: String = "query { person { age( numbers: [42] ), id } }"

Query.named(
OperationType.Mutation,
"MyMutation",
sel[String]("name")
).compile.query
// res12: String = "mutation MyMutation { name }"

def paramQuery = Query.parameterized(
OperationType.Subscription,
"MySubscription",
combined
)

def compiledParamQuery = paramQuery.compile(("first", "second"))
compiledParamQuery.query
// res13: String = """subscription MySubscription( $name : String!, $name2 : String! ) {
// id2( name: $name2 ),
// id( name: $name )
// }"""

compiledParamQuery.variables
// res14: Option[io.circe.JsonObject] = Some(
// value = object[name -> "first",name2 -> "second"]
// )

Http4s

import org.http4s.client._
import org.http4s.{Query => _, _}
import org.http4s.implicits._
import gql.parser._
import gql.client._
import gql.client.dsl._
import gql.client.http4s.syntax._
import cats.effect._
import cats.effect.unsafe.implicits.global

def q: Query.Compiled[String] = Query.named(
QueryAst.OperationType.Query,
"MyQuery",
sel[String]("name")
).compile

def client: Client[IO] = Client{ _ =>
Resource.pure(Response[IO](Status.Ok).withEntity("""{"data":{"name":"John"}}"""))
}

def result: IO[String] = Request[IO](uri=uri"https://example.com/graphql").graphql(q, client)

result.unsafeRunSync()
// res0: String = "John"

Overview of gql

Overview​

  • gql is a library for defining GraphQL servers and clients in Scala.
  • gql embraces a functional code-first approach to GraphQL.
  • gql builds on purely functional abstractions from the Typelevel ecosystem such as cats-parse, cats-effect and fs2.
  • gql is pre-release software, if you find any bugs, please post them on the issue tracker.

Resources​

gql assumes you have knowledge of cats-effect and cats.

No knowledge of GraphQL is needed to get started with gql's tutorial!

For further information consider:


Modules

Gql is published as multiple modules, so you can include what you need.

The available modules are:

// core
libraryDependencies += "io.github.valdemargr" %% "gql-parser" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-core" % "0.3.5",

// server
libraryDependencies += "io.github.valdemargr" %% "gql-server" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-server-http4s" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-natchez" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-server-graphqlws" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-server-goi" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-relational" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-relational-skunk" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-relational-doobie" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-monadic-arrow" % "0.3.5",

// client
libraryDependencies += "io.github.valdemargr" %% "gql-client" % "0.3.5",
libraryDependencies += "io.github.valdemargr" %% "gql-client-http4s" % "0.3.5",

// shared
libraryDependencies += "io.github.valdemargr" %% "gql-graphqlws" % "0.3.5",

// project/plugins.sbt
addSbtPlugin("io.github.valdemargr" % "gql-client-codegen-sbt" % "0.3.5")
// and in build.sbt
myBuild
.enablePlugins(GqlCodeGenPlugin)

Planning

Furthermore, most problems will have less than n plans.

The planner will always generate the largest batches first, hence the "locally greedy" ordering.

Trivially schedulable nodes are always scheduled first if possible; a pruning rule makes sure of this. For a given schedulable node, if no other un-scheduled node of the same family exists (excluding its own descendants), then that node's only and optimal batch is the singleton batch containing only that node.

There are other pruning rules that have been considered, but don't seem necessary for practical problems since most problems produce very few plans.

One such pruning rule considers "optimal" generated batch combinations. If the largest batch that the planner can generate, $n \choose n$, contains nodes that all have the same "latest ending parent", then all other combinations $n \choose k$ where $k < n$ are trivially fruitless.

Once the planner has constructed a lazy list of batches, it then considers every plan that could exist for every batch, hence the computational difficulty of finding the best plan.

info

If you want to understand the algorithm better, consider taking a look at the source code.

Converting a query to a problem​

gql considers only resolvers when running query planning.
Every field that is traversed in a query is expanded to all the resolvers it consists of, such that it becomes a digraph.

As an example, consider the following instance:

import gql._
import gql.dsl.all._
import gql.ast._
import gql.server.planner._
import gql.resolver._
import scala.concurrent.duration._
import cats.implicits._
import cats.effect._
import cats.effect.unsafe.implicits.global

case object Child

def wait[I](ms: Int) = Resolver.effect[IO, I](_ => IO.sleep(50.millis))

val schem = Schema.stateful{
Resolver.batch[IO, Unit, Int](_ => IO.sleep(10.millis) as Map(() -> 42)).flatMap{ b1 =>
Resolver.batch[IO, Unit, String](_ => IO.sleep(15.millis) as Map(() -> "42")).map{ b2 =>
implicit lazy val child: Type[IO, Child.type] = builder[IO, Child.type]{ b =>
b.tpe(
"Child",
"b1" -> b.from(wait(50) andThen b1.opt map (_.get)),
"b2" -> b.from(wait(100) andThen b2.opt map (_.get)),
)
}

SchemaShape.unit[IO](
builder[IO, Unit]{ b =>
b.fields(
"child" -> b.from(wait(42) as Child),
"b2" -> b.from(wait(25) andThen b2.opt map (_.get))
)
}
)
}
}
}.unsafeRunSync()

Now let's define our query and modify our schema so the planner logs:

val qry = """
query {
child {
b1
b2
}
b2
}
"""

val withLoggedPlanner = schem.copy(planner = new Planner[IO] {
def plan(naive: NodeTree): IO[OptimizedDAG] =
schem.planner.plan(naive).map { output =>
println(output.show(ansiColors = false))
println(s"naive: ${output.totalCost}")
println(s"optimized: ${output.optimizedCost}")
output
}
})

And we plan for it and inspect the result:

def runQry() = {
Compiler[IO]
.compile(withLoggedPlanner, qry)
.traverse_{ case Application.Query(fa) => fa }
.unsafeRunSync()
}

runQry()
// name: Query_child.compose-left.compose-right.compose-right, cost: 100.00, end: 100.00, batch: 5
// name: Child_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 2
// name: batch_1, cost: 100.00, end: 300.00, batch: 4
// name: Child_b1.compose-left.compose-left.compose-right, cost: 100.00, end: 200.00, batch: 3
// name: batch_0, cost: 100.00, end: 300.00, batch: 1
// name: Query_b2.compose-left.compose-left.compose-right, cost: 100.00, end: 100.00, batch: 0
// name: batch_1, cost: 100.00, end: 200.00, batch: 4
// >>>>>>>>>>>>>name: batch_1, cost: 100.00, end: 300.00, batch: 4
//
// naive: 700.0
// optimized: 600.0

We can warm up the weights (statistics) a bit by running the query a few times:

(0 to 10).toList.foreach(_ => runQry())

Now we can see how the weights are assigned:

runQry()
// name: Query_child.compose-left.compose-right.compose-right, cost: 50207.73, end: 50207.73, batch: 2
// name: Child_b2.compose-left.compose-left.compose-right, cost: 50157.91, end: 100365.64, batch: 3
// name: batch_1, cost: 15154.46, end: 115520.10, batch: 0
// name: Child_b1.compose-left.compose-left.compose-right, cost: 50160.10, end: 100367.82, batch: 4
// name: batch_0, cost: 10156.19, end: 110524.00, batch: 1
// name: Query_b2.compose-left.compose-left.compose-right, cost: 50216.10, end: 50216.10, batch: 5
// name: batch_1, cost: 15154.46, end: 65370.55, batch: 0
// >>>>>>>>>>>>>>>>>name: batch_1, cost: 15154.46, end: 115520.10, batch: 0
//
// naive: 241206.9090909091
// optimized: 226052.45454545456

Plans can also be shown nicely in a terminal with ANSI colors: Terminal output


Statistics

An instance of Statistics captures the runtime statistics of resolvers. The Statistics structure uses an online linear regression algorithm to compute the relationship between batch size and execution time, such that memory usage is minimal.

The Statistics object records a mapping from String to:

  • count: the number of points the regression contains.
  • meanX: the mean x coordinate of all the points.
  • meanY: the mean y coordinate of all the points.
  • varX: the variance of the x coordinates.
  • covXY: the covariance of the x and y coordinates.

The slope of the function can be computed as covXY / varX and the intercept as meanY - slope * meanX.

The intercept acts as the cost of one element, while the slope is the per-element cost.

The intercept is the more important of the two, since it allows us to compare batch resolvers regardless of their average batch sizes.

import cats.effect._
import gql._
import scala.concurrent.duration._
import cats.effect.unsafe.implicits.global

Statistics[IO].flatMap{ stats =>
stats.updateStats("foo", 1.millis, 1) >>
stats.updateStats("foo", 2.millis, 4) >>
stats.updateStats("foo", 3.millis, 7) >>
stats.updateStats("foo", 4.millis, 10) >>
stats.getStats("foo")
}.unsafeRunSync()
// res0: Statistics.Stats = Stats(
// initialCost = 1000.0,
// extraElementCost = 333.3333333333333
// )
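
For intuition, the regression above can be checked by hand. The four updates correspond to the points (1, 1000), (4, 2000), (7, 3000) and (10, 4000), assuming durations are normalized to microseconds (which appears to be the unit the Stats output uses):

// meanX = 5.5, meanY = 2500
// sum((x - meanX)^2)             = 45
// sum((x - meanX) * (y - meanY)) = 15000
// slope     = covXY / varX = 15000 / 45 = 333.33...   (extraElementCost)
// intercept = meanY - slope * meanX     = 666.66...
// predicted cost at batch size 1 = intercept + slope = 1000.0 (initialCost)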

Global object identification

gql also supports global object identification.

info

Global object identification is primarily used by Relay clients to refetch objects.

Global object identification requires two things:

  1. An id field on the object type.
  2. A node field on the query type to look objects up.

Codecs​

gql's global object identification (goi) module introduces a codec type IDCodec[A] that decodes an array of strings into some type A and encodes an A into an array of strings.

import cats.implicits._
import gql._
import gql.goi._
import gql.goi.codec

final case class UserId(
id1: String,
id2: Int
)

val userIdCodec: IDCodec[UserId] = (codec.string *: codec.int).to[UserId]
info

The *: composition syntax is provided on top of the twiddles library to map tuples to and from case classes. Consider taking a look at the twiddles documentation.

You won't be calling the encode and decode functions explicitly, but now that we have a codec for our UserId, let's try it out.

val encoded = userIdCodec.encode(UserId("abc", 123)).mkString_(":")
// encoded: String = "abc:123"
val decoded = userIdCodec.decode(encoded.split(":"))
// decoded: cats.data.package.ValidatedNec[String, UserId] = Valid(
// a = UserId(id1 = "abc", id2 = 123)
// )

Optional fields can also be modeled with the opt method:

final case class WithOpt(id1: String, id2: Option[String])

lazy val c = (codec.string *: codec.string.opt).to[WithOpt]
c.encode(WithOpt("abc", Some("def"))).mkString_(":")
// res0: String = "abc:def"
c.encode(WithOpt("abc", None)).mkString_(":")
// res1: String = "abc:null"
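
Presumably the null sentinel decodes back to None; a quick round-trip sketch with the codec above (expected result shown as a comment):

c.decode("abc:null".split(":"))
// expected: Valid(WithOpt("abc", None))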

Codecs can also handle errors.

import java.util.UUID
lazy val uuidCodec = codec.string.eimap[UUID](
str => Either.catchNonFatal(UUID.fromString(str)).leftMap(_ => s"Invalid UUID '$str'"),
)(_.toString())

uuidCodec.decode(Array("abc"))
// res2: cats.data.package.ValidatedNec[String, UUID] = Invalid(
// e = Singleton(a = "Invalid UUID 'abc'")
// )
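
A well-formed UUID string decodes successfully; a small sketch (expected result shown as a comment):

uuidCodec.decode(Array("123e4567-e89b-12d3-a456-426614174000"))
// expected: Valid(123e4567-e89b-12d3-a456-426614174000)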

Schema builder dsl​

GOI provides a dsl when building an object or interface that requires global object identification. To add goi to a Type[F, A] you must provide:

  • A function A => B where B has a IDCodec instance.
  • A function NonEmptyList[B] => F[Map[B, A]] that can fetch items if requested through the node field.
import gql.ast._
import gql.dsl.all._
import gql.goi.dsl._
import cats.effect._
import cats.data._

final case class MyId(id: String)
object MyId {
implicit lazy val myIdCodec: IDCodec[MyId] = codec.string.to[MyId]
}

final case class MyData(id: MyId, name: String)
def getData(id: MyId): IO[Option[MyData]] = IO.pure(Some(MyData(id, "name")))

implicit val myData: Type[IO, MyData] = tpe[IO, MyData](
"MyData",
"name" -> lift(_.name)
).goi(_.id) { keys: NonEmptyList[MyId] =>
keys.toList
.traverse(k => getData(k).map(k -> _))
.map(_.collect{ case (k, Some(v)) => k -> v }.toMap)
}

Once you are done declaring all of your types, you must accumulate a list of global object ids that the node field can fetch.

Gql's ast is extensible with user definable attributes, so we can introspect the schema to find all of the goi information we need.

import cats.effect.unsafe.implicits.global
import io.circe.syntax._

def schemaWithGoi: IO[Schema[IO, Unit, Unit, Unit]] = Schema.simple {
Goi.addSchemaGoi(SchemaShape.unit[IO](fields("data" -> eff(_ => getData(MyId("abc"))))))
}

def runWith(id: String) = {
def compiled = schemaWithGoi.map{ schema =>
Compiler[IO].compile(
schema,
s"""
query {
node(id: "$id") {
... on MyData {
id
name
}
}
}
"""
)
}

compiled
.flatMap(_.traverse{ case Application.Query(fa) => fa })
.unsafeRunSync()
.toOption
.get.asJson.spaces2
}

def makeId(str: String) = new String(java.util.Base64.getEncoder.encode(str.getBytes()))

runWith(makeId("MyData:abc"))
// res3: String = """{
// "data" : {
// "node" : [
// {
// "id" : "TXlEYXRhOmFiYw==",
// "name" : "name"
// }
// ]
// }
// }"""

runWith(makeId(""))
// res4: String = """{
// "data" : {
// "node" : null
// },
// "errors" : [
// {
// "message" : "Empty id",
// "path" : [
// "node"
// ]
// }
// ]
// }"""

runWith(makeId("Other"))
// res5: String = """{
// "data" : {
// "node" : null
// },
// "errors" : [
// {
// "message" : "Typename `Other` does not exist in this schema.",
// "path" : [
// "node"
// ]
// }
// ]
// }"""

runWith(makeId("Query"))
// res6: String = """{
// "data" : {
// "node" : null
// },
// "errors" : [
// {
// "message" : "Typename `Query` does not have a global object identitifaction defined for it.",
// "path" : [
// "node"
// ]
// }
// ]
// }"""

runWith(makeId("MyData:abc:extra"))
// res7: String = """{
// "data" : {
// "node" : null
// },
// "errors" : [
// {
// "message" : "Invalid Global object identifier size. Expected 2 id parts seperated by :, but got 3 parts. The types must be typename(string):string. The provided id parts were MyData:abc:extra.",
// "path" : [
// "node"
// ]
// }
// ]
// }"""

GraphQL-WS

gql contains a spec-compliant GraphQL over WebSocket protocol implementation. The implementation requires a compiler implementation and returns a stream of messages to send to the client and a handler for messages that originate from the client.

The GraphQL-WS implementation shouldn't usually be used directly, but rather through a websocket-supported http server implementation, like http4s.

Http4s

The structure of this json payload is not constrained; this is up to the application to consider.

However, the norm is to embed http headers in the payload:

{
"authorization": "Bearer aHR0cHM6Ly93d3cueW91dHViZS5jb20vd2F0Y2g/dj1kUXc0dzlXZ1hjUQ=="
}

Returning Left from the query handler function lets the application return an error message to the client, which also immediately closes the websocket. One can embed errors such as unauthorized here. The GraphQL over websocket protocol defines no way to communicate arbitrary information without closing the connection.

The websocket route also comes in a trait flavor:

import org.http4s.server.websocket.WebSocketBuilder
import io.circe._

def wsb: WebSocketBuilder[IO] = ???

val wsh = new WSHandler[IO] {
type A = Creds

def preParsing(headers: Map[String, Json]): IO[Either[String, A]] =
headers.get("authorization") match {
case None => IO(Left("missing authorization header"))
case Some(a) =>
a.asString match {
case None => IO(Left("authorization token must be a string"))
case Some(a) => authorize(a).map{
case None => Left("invalid token")
case Some(creds) => Right(creds)
}
}
}

def compile(params: QueryParameters, value: A): Resource[IO, Compiler.Outcome[IO]] =
Resource.pure(authorizeApp(Compiler[AuthIO].compileWith(schema, params), value))
}

def wsRoutes: HttpRoutes[IO] = Http4sRoutes.wsHandler[IO](wsh, wsb)

Natchez (tracing)

The natchez package provides functions to trace your query execution and planning.

The tracing functions include information such as the query plan and query in case of an invalid query.

The easiest way to add tracing to your app is by tracing the schema via traceSchema and incoming queries via traceQuery. For instance, consider the following tracing implementation for a http server:

import natchez._
import gql._
import gql.natchez.NatchezTracer
import cats.effect.{Trace => _, _}
import gql.http4s.Http4sRoutes

implicit def trace: Trace[IO] = ???

def schema: Schema[IO, Unit, Unit, Unit] = ???

def tracedSchema = NatchezTracer.traceSchema(schema)

def traceAndRunHttpRequest(qp: QueryParameters) =
NatchezTracer.traceQuery[IO](qp.query, qp.variables.getOrElse(Map.empty), qp.operationName)(
Compiler[IO].compileWith(tracedSchema, qp)
)

def routes = Http4sRoutes.syncSimple[IO](traceAndRunHttpRequest(_).map(Right(_)))

Relational

Most use-cases are covered by simply invoking the join method with the proper multiplicity parameter.

When your AST is inspected to build a query, a recursive AST walk composes a big reassociation function that can translate flat query results into the proper hierarchical structure. This composed function also tracks the visited columns and their decoders.

The query algebra has a special operation that lets the caller modify the state however they wish. The dsl uses this state modification for various tasks, such as providing a convenient join method that both joins a table and performs the proper reassociation of results.
Consider the following example that joins a table more explicitly.

val q1 = for {
ht <- homeTable.simpleJoin(_ => void"true")
_ <- reassociate[List](ht.tableKey)
// some other reassociation criteria
_ <- reassociate[Option](select(int4, void"42"))
} yield ht
// q1: algebra.Query[[X]List[Option[X]], HomeTable] = FlatMap(
// fa = FlatMap(
// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@625f3240)),
// f = gql.relational.QueryDsl$$Lambda$13935/0x00000008039a7040@2ccab511
// ),
// f = <function1>
// )

// we can perform reassociation before performing the actions in 'q1'
val q2 = reassociate[Option](select(text, void"'john doe'")).flatMap(_ => q1)
// q2: algebra.Query[[X]Option[List[Option[X]]], HomeTable] = FlatMap(
// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@612fb68)),
// f = <function1>
// )

// we can also change the result structure after performing the actions in 'q2'
q2.mapK[List](new (λ[X => Option[List[Option[X]]]] ~> List) {
def apply[A](fa: Option[List[Option[A]]]): List[A] = fa.toList.flatten.flatMap(_.toList)
})
// res4: algebra.Query[List, HomeTable] = LiftEffect(
// fa = EitherT(value = cats.data.IndexedStateT@6e7c90ca)
// )

Accessing the low-level state also lets the user perform other tasks, such as unique id (new alias) generation.

for {
alias1 <- newAlias
alias2 <- newAlias
} yield ()
// res5: algebra.Query[[X]X, Unit] = FlatMap(
// fa = LiftEffect(fa = EitherT(value = cats.data.IndexedStateT@752a7277)),
// f = <function1>
// )

Implementing your own integration​

The entire dsl and query compiler are available if you implement a couple of methods.

Here is the full skunk integration.

import _root_.{skunk => sk}
object MyIntegration extends QueryAlgebra {
// What is a fragment
type Frag = sk.AppliedFragment
// How do we transform a string into a fragment
def stringToFrag(s: String): Frag = sql"#${s}".apply(Void)
// Combine and create empty fragments
implicit def appliedFragmentMonoid: Monoid[Frag] = sk.AppliedFragment.MonoidAppFragment
// How do we decode values
type Decoder[A] = sk.Decoder[A]
// How can we combine decoders
implicit def applicativeForDecoder: Applicative[Decoder] = Decoder.ApplicativeDecoder
// How do we make an optional decoder
def optDecoder[A](d: Decoder[A]): Decoder[Option[A]] = d.opt
// What is needed to perform a query
type Connection[F[_]] = Resource[F, Session[F]]
// Given a connection, how do we use it
implicit def skunkQueryable[F[_]: MonadCancelThrow]: Queryable[F] = new Queryable[F] {
def apply[A](query: AppliedFragment, decoder: Decoder[A], connection: Connection[F]): F[List[A]] =
connection.use(_.execute(query.fragment.query(decoder))(query.argument))
}
}

The dsl can be instantiated for any query algebra.

object myDsl extends QueryDsl(MyIntegration)

You can also add integration-specific methods to your dsl.

object myDsl extends QueryDsl(MyIntegration) {
def someOperationSpecificToMyIntegration = ???
}

Adding arguments​

All field combinators allow arguments to be provided naturally, regardless of where the field is in the query.

implicit lazy val pt: Type[IO, QueryContext[PersonTable]] = ???

tpe[IO, QueryContext[HomeTable]](
"HomeTable",
"people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>
for {
hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")
p <- personTable.join(p => sql"${hp.personCol} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))
} yield p
}
)

Sum types​

Sum types can also be declared naturally.

Let's set up some tables for sum types.
connection.use{ ses =>
val queries = List(
sql"drop table if exists owner",
sql"drop table if exists dog",
sql"drop table if exists cat",
sql"""create table owner (
id int4 primary key
)""",
sql"""create table dog (
id int4 primary key,
owner_id int4 not null,
name text not null,
age int not null
)""",
sql"""create table cat (
id int4 primary key,
owner_id int4 not null,
name text not null,
age int not null
)""",
sql"""insert into owner (id) values (1)""",
sql"""insert into owner (id) values (2)""",
sql"""insert into dog (id, owner_id, name, age) values (1, 1, 'Dog', 42)""",
sql"""insert into cat (id, owner_id, name, age) values (2, 2, 'Cat', 22)""",
)

queries.traverse(x => ses.execute(x.command))
}.unsafeRunSync()
// res7: List[<none>.<root>.skunk.data.Completion] = List(
// DropTable,
// DropTable,
// DropTable,
// CreateTable,
// CreateTable,
// CreateTable,
// Insert(count = 1),
// Insert(count = 1),
// Insert(count = 1),
// Insert(count = 1)
// )

And now we can run it.

sealed trait Animal { 
def name: String
}
case class Dog(owner: String, name: String, age: Int) extends Animal
case class Cat(owner: String, name: String, age: Int) extends Animal

trait OwnerTable extends SkunkTable {
def table = void"owner"
val (idCol, id) = sel("id", int4)
def tableKey = id
}
case class OwnerTableUnion(alias: String) extends OwnerTable
case class OwnerTableInterface(alias: String) extends OwnerTable
val ownerTableUnion = skunkTable(OwnerTableUnion)
// ownerTableUnion: SkunkTableAlg[OwnerTableUnion] = gql.relational.skunk.dsl$$anon$2@cb123fe
val ownerTableInterface = skunkTable(OwnerTableInterface)
// ownerTableInterface: SkunkTableAlg[OwnerTableInterface] = gql.relational.skunk.dsl$$anon$2@1de940bb

case class DogTable(alias: String) extends SkunkTable {
def table = void"dog"

val (idCol, id) = sel("id", int4)
val (ownerCol, owner) = sel("owner_id", int4)
val (nameCol, name) = sel("name", text)
val (ageCol, age) = sel("age", int4)

def tableKey = id
}
val dogTable = skunkTable(DogTable)
// dogTable: SkunkTableAlg[DogTable] = gql.relational.skunk.dsl$$anon$2@1719790c

case class CatTable(alias: String) extends SkunkTable {
def table = void"cat"

val (idCol, id) = sel("id", int4)
val (ownerCol, owner) = sel("owner_id", int4)
val (nameCol, name) = sel("name", text)
val (ageCol, age) = sel("age", int4)

def tableKey = id
}
val catTable = skunkTable(CatTable)
// catTable: SkunkTableAlg[CatTable] = gql.relational.skunk.dsl$$anon$2@31860852

implicit lazy val animalInterface = interface[IO, QueryContext[OwnerTableInterface]](
"AnimalInterface",
"owner" -> abst[IO, String]
)

implicit lazy val cat = tpe[IO, QueryContext[CatTable]](
"Cat",
"owner" -> query(_.owner),
"name" -> query(_.name),
"age" -> query(_.age)
).contImplements[OwnerTableInterface]{ owner =>
catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}")
}

implicit lazy val dog = tpe[IO, QueryContext[DogTable]](
"Dog",
"owner" -> query(_.owner),
"name" -> query(_.name),
"age" -> query(_.age)
).contImplements[OwnerTableInterface]{ owner =>
dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}")
}

// we use the builder to create a union type
implicit lazy val animal = relBuilder[IO, OwnerTableUnion] { b =>
b
.union("Animal")
.contVariant(owner => dogTable.join[Option](dog => sql"${owner.idCol} = ${dog.ownerCol}"))
.contVariant(owner => catTable.join[Option](cat => sql"${owner.idCol} = ${cat.ownerCol}"))
}

def schema = gql.Schema.query(
tpe[IO, Unit](
"Query",
"animals" -> runFieldSingle(connection) { (_: Unit) =>
ownerTableUnion.join[List](_ => sql"true")
},
"animalInterfaces" -> runFieldSingle(connection) { (_: Unit) =>
ownerTableInterface.join[List](_ => sql"true")
}
)
)

def animalQuery = """
query {
animals {
__typename
... on Dog {
owner
name
age
}
... on Cat {
owner
name
age
}
}
animalInterfaces {
__typename
... on Dog {
owner
name
age
}
... on Cat {
owner
name
age
}
}
}
"""

schema
.map(Compiler[IO].compile(_, animalQuery))
.flatMap { case Right(Application.Query(run)) => run.map(_.handleErrors{e => println(e.getMessage()); ""}.asJson.spaces2) }
.unsafeRunSync()
// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id
// from owner as t1
// left join dog as t2 on t1.id = t2.owner_id
// left join cat as t3 on t1.id = t3.owner_id
// where true
// select t1.id, t2.id, t2.age, t2.name, t2.owner_id, t3.id, t3.age, t3.name, t3.owner_id
// from owner as t1
// left join dog as t2 on t1.id = t2.owner_id
// left join cat as t3 on t1.id = t3.owner_id
// where true
// res8: String = """{
// "data" : {
// "animalInterfaces" : [
// {
// "__typename" : "Cat",
// "age" : 22,
// "name" : "Cat",
// "owner" : 2
// },
// {
// "__typename" : "Dog",
// "age" : 42,
// "name" : "Dog",
// "owner" : 1
// }
// ],
// "animals" : [
// {
// "__typename" : "Cat",
// "age" : 22,
// "name" : "Cat",
// "owner" : 2
// },
// {
// "__typename" : "Dog",
// "age" : 42,
// "name" : "Dog",
// "owner" : 1
// }
// ]
// }
// }"""

Declaring complex subqueries​

Sometimes your tables must have complex filtering, limiting, ordering and so on. The most obvious way to declare such parameters is simply to use a subquery.

case class ParameterizedPersonTable(alias: String, table: AppliedFragment) extends SkunkTable {
val (idCol, id) = sel("id", int4)
val (nameCol, name) = sel("name", text)
val (ageCol, age) = sel("age", int4)

def tableKey = id
}
def parameterizedPersonTable(
limitOffset: Option[(Int, Int)],
order: Option[AppliedFragment],
filter: Option[AppliedFragment]
) = skunkTable{ alias =>
val filt = filter.foldMap(f => sql"where ${f.fragment}".apply(f.argument))
val ord = order.foldMap(f => sql"order by ${f.fragment}".apply(f.argument))
val lim =
limitOffset.foldMap{ case (limit, offset) => sql"limit ${int4} offset ${int4}".apply((limit, offset))}
ParameterizedPersonTable(
alias,
sql"""|(
| select *
| from person
| ${filt.fragment}
| ${ord.fragment}
| ${lim.fragment}
|)""".stripMargin.apply((filt.argument, ord.argument, lim.argument))
)
}

And now we can use our new table.

implicit lazy val ppt: Type[IO, QueryContext[ParameterizedPersonTable]] = ???

val personQueryArgs = (
arg[Option[Int]]("limit"),
arg[Option[Int]]("offset"),
arg[Option[Boolean]]("order"),
arg[Option[Int]]("ageFilter")
).tupled
tpe[IO, QueryContext[HomeTable]](
"HomeTable",
"people" -> cont(personQueryArgs) { case (home, (lim, off, ord, af)) =>
for {
hp <- homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}")
p <- parameterizedPersonTable(
limitOffset = (lim, off).tupled,
order = ord.map{
case true => void"age desc"
case false => void"age asc"
},
filter = af.map(age => sql"age > ${int4}".apply(age))
).join(p => sql"${hp.personCol} = ${p.idCol}")
} yield p
}
)

Using relational without tables​

There is no restriction on how you can implement a table, so you can choose your own strategy. For instance, say we just wanted to declare everything up-front and select fields ad-hoc.

import gql.relational.skunk.SkunkIntegration.Query.Select

case class AdHocTable(
alias: String,
table: AppliedFragment,
tableKey: Select[?],
) extends SkunkTable

tpe[IO, QueryContext[HomeTable]](
"HomeTable",
"people" -> cont(arg[List[Int]]("ids")) { (home, ids) =>
for {
hp <- skunkTable(alias =>
AdHocTable(
alias,
sql"#${alias}.home_person".apply(Void),
select(
int4 ~ int4,
sql"#${alias}.home_id".apply(Void),
sql"#${alias}.person_id".apply(Void)
)
)
).join[List](hp => sql"${home.idCol} = ${hp.aliased(sql"home_id")}")
p <- personTable.join(p => sql"${hp.aliased(sql"person_id")} = ${p.idCol} and ${p.idCol} in (${int4.list(ids)})".apply(ids))
} yield p
}
)

Since there is no dsl for this, constructing the query is a bit gruesome. Consider whether a dsl is possible for your formulation.

Running transactions​

Most use cases involve running all queries in a transaction, but none of the examples so far have introduced this.

The runField method takes a list of inputs I and produces Query[G, (Select[I], B)], such that query results can be reassociated with the inputs.

def myBatchedHomeQuery(conn: Resource[IO, Session[IO]]) = {
case class MyDatatype(homeId: Int)

tpe[IO, MyDatatype](
"MyDatatype",
"home" -> runField[IO, List, MyDatatype, HomeTable](conn) { xs =>
val lst = xs.toList.map(_.homeId)
for {
ht <- homeTable.join[List](ht => sql"${ht.idCol} in (${int4.list(lst)})".apply(lst))
} yield (ht.id.fmap(MyDatatype), ht)
}
)
}

To solve the query multiplicity explosion, you can use contBoundary, which works almost like cont, except that the query will be split up into two queries.

The contBoundary function takes two interesting parameters. The first parameter will be a projection of the current query, decoded into B. The second parameter turns this B into another query, which will be the root of the new query.

def boundaryQuery(conn: Resource[IO, Session[IO]]) = {
case class MyDatatype(homeId: Int)

relBuilder[IO, HomeTable]{ rb =>
rb.tpe(
"HomeTable",
"people" -> rb.contBoundary(conn){ home =>
homePersonTable.join[List](hp => sql"${home.idCol} = ${hp.homeCol}").map(_.person)
}{ (xs: NonEmptyList[Int]) =>
val lst = xs.toList
personTable.join(p => sql"${p.idCol} in (${int4.list(lst)})".apply(lst)).map(p => p.id -> p)
}
)
}
}
info

The contBoundary combinator is only available when using the relBuilder, since type inference does not work very well.

Inference troubles with runField can also be alleviated by using the relBuilder.


Monadic Resolver DSL

Modelling complex evaluation with Resolvers can be tricky.
It often involves using first to pair up an arrow's result with its input and proceeding with map or contramap.

Gql introduces an in-language monadic arrow dsl that re-writes a monadic arrow expression into a series of map, contramap and first invocations.

info

This feature is akin to the proc notation in Haskell.

Using the notation is straightforward, the same (covariant) combinators for Resolver exist in the arrow dsl.

import gql.resolver._
import cats.implicits._
import cats.effect._
import gql.arrow._

// Bind the effect type (IO) to aid with compiler errors and inference
val d = dsl[IO]
import d._
val r: Resolver[IO, Int, String] =
proc[Int] { i: Var[Int] =>
for {
a <- i.evalMap(x => IO(x + 2))
b <- a.evalMap(x => IO(x * 3))
c <- (a, b).tupled.evalMap{ case (aa, bb) => IO(aa + bb) }
} yield c.map(_.toString)
}
Most syntactic extensions don't make much sense unless the arrow type (Resolver) is bound, which requires knowing the effect type. The full monadic arrow language is also available as toplevel functions.
import gql.arrow.{Language => L}
L.proc[Resolver[IO, *, *], Int, String] { i =>
for {
x <- L.declare[Resolver[IO, *, *], Int, Int](i)(Resolver.lift[IO, Int](z => z * 2))
y <- L.declare[Resolver[IO, *, *], (Int, Int), String]((x, x).tupled)(Resolver.lift[IO, (Int, Int)]{ case (a, b) => (a + b).toString() })
} yield y
}
// res0: Resolver[IO, Int, String] = gql.resolver.Resolver@65246a48

The underlying arrow is also available for composition via apply.

proc[Int] { i =>
for {
x <- i(_.evalMap(z => IO(z + 1)))
out <- x.apply(_.map(_.toString))
} yield out
}

Technical details​

The dsl introduces two datatypes, Var and Decl.

  • Var is a reference to a set of variables that occur in the arrow. Var forms an Applicative.
  • Decl is used to re-write the monadic (flatMap) structure into an arrow. Decl forms a Monad.

The primary use of Decl is to bind variables. Every transformation on a Variable introduces a new Variable which is stored in the Decl structure.

info

Since Var forms an Applicative, map is available for Var. map for Var is not memoized, since it does not lift the Var into Decl. Var has an extension rmap which introduces a new variable that memoizes the result. That is, the following equivalences hold:

declare((v: Var[A]).map(f))(Resolver.id[F, A]) <-> 
(v: Var[A]).rmap(f) <->
(v: Var[A]).apply(_.map(f))

Closures are illegal in the dsl, as they refer to variables that are not guaranteed to be available, so prefer invoking proc once per Resolver.

println {
scala.util.Try {
proc[Int] { i =>
for {
x <- i.evalMap(x => IO(x + 2))
o <- x.andThen(proc[Int]{ _ =>
x.rmap(y => y + 2)
})
} yield o
}
}.toEither.leftMap(_.getMessage)
}
// Left(Variable closure error.
// Variable declared at arrow_dsl.md:70.
// Compilation initiated at arrow_dsl.md:68.
// Variables that were not declared in this scope may not be referenced.
// Example:
// ```
// proc[Int]{ i =>
// for {
// x <- i.apply(_.map(_ + 1))
// y <- i.apply(_.andThen(proc[Int]{ _ =>
// // referencing 'x' here is an error
// x.apply(_.map(_ + 1))
// }))
// } yield y
// }
// ```)
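
A corrected version keeps the whole computation inside the single enclosing proc; a minimal sketch:

proc[Int] { i =>
  for {
    x <- i.evalMap(x => IO(x + 2))
    o <- x.rmap(y => y + 2)
  } yield o
}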

Builder extensions​

The dsl includes an extension method to FieldBuilder that eases construction of Fields. The dsl also enhances any resolver with a proc extension method.

import gql.ast._

val gqlDsl = gql.dsl.GqlDsl[IO]
import gqlDsl._

builder[Unit]{ b =>
b.tpe(
"MyType",
"field" -> b.proc{ i =>
for {
x <- i.evalMap(_ => IO(1 + 2))
y <- x.rmap(_ + 3)
} yield y
},
"otherField" -> b(_.proc{ i =>
i.evalMap(_ => IO(1 + 2))
})
)
}

Composition​

Sharing common sub-arrows is a desirable property. This can be expressed naturally with the dsl.

def mulDiv(i: Var[Int]): Decl[Var[Int]] = for {
x <- i.rmap(_ * 2)
y <- x.rmap(_ / 2)
} yield y

proc[Int](mulDiv(_) >>= mulDiv)
// res4: Resolver[IO, Int, Int] = gql.resolver.Resolver@25ef4a6f

proc[Int](mulDiv(_) >>= mulDiv >>= mulDiv)
// res5: Resolver[IO, Int, Int] = gql.resolver.Resolver@53fdfaf6

Toplevel expressions​

It is recommended to always work in a scope with your effect type (F) bound, to ease inference and type signatures. There is, however, support for toplevel proc resolver expressions.

def toplevelMulDiv[F[_]](i: Var[Int]): ResolverDecl[F, Var[Int]] = {
val d = dsl[F]
import d._
for {
x <- i.rmap(_ * 2)
y <- x.rmap(_ / 2)
} yield y
}

Passing the dsl as an implicit parameter is also an option.

def toplevelMulDiv[F[_]](i: Var[Int])(implicit d: ResolverArrowDsl[F]): ResolverDecl[F, Var[Int]] = {
import d._
for {
x <- i.rmap(_ * 2)
y <- x.rmap(_ / 2)
} yield y
}
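
Either spelling composes like any other Decl. As a minimal sketch of invoking the implicit-parameter variant (assuming the dsl instance d bound earlier on this page is the ResolverArrowDsl[IO] the implicit variant expects):

// the dsl instance doubles as the implicit parameter
proc[Int](i => toplevelMulDiv[IO](i)(d))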

Lifting arguments​

Requesting arguments is made easier by the arrow dsl.

proc[Int] { i =>
for {
x <- i.evalMap(x => IO(x + 2))
y <- argument(arg[Int]("age"))
z <- (x, y).tupled.evalMap { case (a, b) => IO(a + b) }
} yield z
}

Choice​

The dsl also covers ArrowChoice's choice combinator.

proc[Int] { i =>
for {
x <- i.rmap(v => if (v > 5) Left(v) else Right(v))
y <- x.choice(
l => l.rmap(_ * 2),
r => for {
a <- argument(arg[Int]("age"))
out <- (a, r, i).tupled.rmap{ case (a, b, c) => a + b + c }
} yield out
)
} yield y
}

Batching example​

Some steps commonly occur when writing batched resolvers:

  1. Pulling an id out of the parent datatype.
  2. Passing the id to a batching resolver.
  3. Pairing the batched output with the parent datatype.

This pairing requires some clever use of first and contramap/lmap. This behaviour is much easier to express monadically since we have access to closures.

def getAddresses(ids: Set[Int]): IO[Map[Int, String]] =
IO(ids.toList.map(id => id -> s"Address $id").toMap)

case class DataType(id: Int, name: String)
proc[DataType] { i =>
for {
id <- i.rmap(_.id)
r = Resolver.inlineBatch[IO, Int, String](getAddresses).opt
(addr: Var[Option[String]]) <- id.andThen(r)
p = (i, addr).tupled
out <- p.rmap{ case (dt, a) => s"${dt.name} @ ${a.getOrElse("<unknown>")}" }
} yield out
}

Arrowless final?​

Expressions can be declared for any arrow, not just Resolver. The usefulness of this property is not significant, but it is an interesting property nonetheless.

import cats.free._
import cats.arrow._
def mulDiv[F2[_, _]](v: Var[Int]): Free[DeclAlg[F2, *], Var[Int]] = {
val d = new Language[F2] {}
import d._
// We can ask for the arrow evidence that must occur when some proc compiles us
askArrow.flatMap{ implicit arrow: Arrow[F2] =>
for {
x <- v.rmap(_ * 2)
y <- x.rmap(_ / 2)
} yield y
}
}

proc[Int] { i =>
for {
x <- i.rmap(_ * 2)
y <- mulDiv(x)
} yield y
}

Compiler

The output of query compilation is either an error or an Application; an executable version of the query that closes over all required inputs:

import gql._

sealed trait Application[F[_]]
object Application {
final case class Query[F[_]](run: F[QueryResult]) extends Application[F]
final case class Mutation[F[_]](run: F[QueryResult]) extends Application[F]
final case class Subscription[F[_]](run: fs2.Stream[F, QueryResult]) extends Application[F]
}

For most applications there is a need for more steps than just preparing the query. For instance, production deployments can implement features such as caching, logging, metrics, tracing, and authorization, to name a few. The compiler utility consists of methods for parsing, preparing, and assembling an application, which can be composed to solve sophisticated use cases.

For instance, say that we would like to modify a phase in query compilation, such that the final executable logs queries that are too slow.

import gql._
import cats.implicits._
import cats.effect._
import cats.effect.implicits._
import scala.concurrent.duration._

trait Logger[F[_]] {
def warn(msg: String): F[Unit]
}

def lg: Logger[IO] = ???

def logSlowQueries(query: String, app: Application[IO]): Application[IO] = app match {
case Application.Query(fa) =>
Application.Query {
fa.timed.flatMap{ case (dur, a) =>
if (dur > 1.second) lg.warn(s"Slow query: $query") as a
else IO.pure(a)
}
}
case x => x
}

Or another example, we have a cache that we wish to clear between subscription events.

trait Cache[F[_]] {
def clear: F[Unit]
// other cache related functions ...
}

def addCacheClearing(cache: Cache[IO], app: Application[IO]): Application[IO] = app match {
case Application.Subscription(stream) =>
Application.Subscription {
// gql does not evaluate the next event before the previous one has been consumed
stream.evalTap(_ => cache.clear)
}
case x => x
}
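
Such phases compose by plain function application around the result of compile. A minimal sketch that stacks both transformations (the name compilePipeline is illustrative):

def compilePipeline(
  sch: Schema[IO, Unit, Unit, Unit],
  cache: Cache[IO],
  query: String
) =
  Compiler[IO].compile(sch, query).map { app =>
    addCacheClearing(cache, logSlowQueries(query, app))
  }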

Context

Many GraphQL implementations pass a context value around during query execution. gql has no such concept; context is rather a by-product of being written in tagless style.

MTL​

We can emulate context by using a ReaderT/Kleisli monad transformer from cats. Writing ReaderT/Kleisli everywhere is tedious; instead, consider opting for cats.mtl.Ask:

import gql._
import gql.dsl.all._
import gql.ast._
import cats.mtl.Ask
import cats._
import cats.data._
import cats.implicits._
import io.circe._
import cats.effect._
import cats.effect.unsafe.implicits.global
import io.circe.syntax._

final case class Context(
userId: String
)

def queries[F[_]: Functor](implicit A: Ask[F, Context]): Type[F, Unit] =
tpe[F, Unit](
"Query",
"me" -> eff(_ => A.ask.map(_.userId))
)

type G[A] = Kleisli[IO, Context, A]

def query = """
query {
me
}
"""

Statistics[IO].flatMap{ stats =>
val schema =
Schema.query(stats.mapK(Kleisli.liftK[IO, Context]))(queries[G])

Compiler[G].compile(schema, query) match {
case Right(Application.Query(fa)) =>
fa
.run(Context("john_doe"))
.map(_.asJson)
}
}.unsafeRunSync()
// res0: Json = JObject(
// value = object[data -> {
// "me" : "john_doe"
// }]
// )

Working in a specific effect​

If you are working in a specific effect, you most likely have more tools to work with. For instance, if you are using IO, you can use IOLocal to wire context through your application.


trait Authorized {
def getAuth: IO[Ior[String, Context]]
}

object Authorized {
def fromIOLocal(iol: IOLocal[Option[Context]]) = new Authorized {
def getAuth = iol.get.map{
case None => Ior.Left("You must authorize to perform this action")
case Some(c) => Ior.Right(c)
}
}
}

def makeSchema(implicit auth: Authorized): Schema[IO, Unit, Unit, Unit] = ???

IOLocal[Option[Context]](None).flatMap{ implicit loc =>
implicit val auth = Authorized.fromIOLocal(loc)

def s = makeSchema

def runQueryWithSchema: IO[Unit] = ???

def runAuthorizedQuery(userId: String): IO[Unit] =
loc.set(Some(Context(userId))) >> runQueryWithSchema

runAuthorizedQuery("john_doe")
}
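
A small helper can make the set-then-run pattern reusable and reset the context afterwards. A minimal sketch (withUser is a hypothetical name; IOLocal#set and IO#guarantee are standard cats-effect operations):

def withUser[A](loc: IOLocal[Option[Context]], userId: String)(fa: IO[A]): IO[A] =
  loc.set(Some(Context(userId))) *> fa.guarantee(loc.set(None))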

The DSL

gql's dsl is a lightweight set of smart constructors. If you have a particular use case or even a coding style that conflicts with the dsl, you can always introduce your own schema definition syntax or build on top of the existing dsl.

Let's begin by importing what we need.

import cats.data._
import cats.effect._
import cats.implicits._
import gql.dsl.all._
import gql.ast._
import gql.resolver._

Fields​

The simplest form of field construction comes from the build.from smart constructor. It simply lifts a resolver into a field, given that a gql output type exists for the resolver output.

def r: Resolver[IO, Int, String] = Resolver.lift(i => i.toString())

val f: Field[IO, Int, String] = build.from(r)
// f: Field[IO, Int, String] = Field(
// resolve = gql.resolver.Resolver@539567dd,
// output = cats.Always@141211d5,
// description = None,
// attributes = List()
// )

Sometimes type inference cannot find the proper type for a field:

build.from(Resolver.liftF(i => IO(i.toString())))
// error: value liftF is not a member of object gql.resolver.Resolver
// did you mean lift? or perhaps liftFull?
// build.from(Resolver.liftF(i => IO(i.toString())))
// ^^^^^^^^^^^^^^

The type parameters for build are partially applied, such that when type inference isn't enough, types can be supplied explicitly.

build[IO, Int].from(Resolver.effect(i => IO(i.toString())))

build.from(Resolver.effect((i: Int) => IO(i.toString())))


For some fields, there is an even more concise syntax. Invoking the apply method of build takes a higher order function that goes from the identity resolver (Resolver[F, A, A]) to some output.

build[IO, Int](_.map(i => i * 2).evalMap(i => IO(i))): Field[IO, Int, Int]

Builders​

Complex structures may require many special resolver compositions. The dsl also introduces something akin to a builder pattern. The build function from the previous section creates a builder that has more constructors than just from and apply.

import gql.dsl.FieldBuilder
val b: FieldBuilder[IO, Int] = build[IO, Int]
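
Such a builder is typically kept local to the type that needs it; a minimal sketch, mirroring the builder function shown in the Monadic Resolver DSL section (the type and field names are illustrative, and from on the builder is assumed to behave like build.from):

val gqlDsl = gql.dsl.GqlDsl[IO]
import gqlDsl._

builder[Int] { b =>
  b.tpe(
    "Number",
    "asString" -> b(_.map(_.toString)),
    "doubled" -> b.from(Resolver.lift[IO, Int](_ * 2))
  )
}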

Often a builder is only relevant within a scope, thus one can end up having many unused builders in scope.

For two fields with the same name, gql will always pick the concrete field. If both are concrete, it will prioritize the field from the subtype (the type you're working on).

trait Pet {
def name: String
def age: Int
def weight: Double
}

case class Dog(name: String, age: Int, weight: Double) extends Pet

implicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](
"Pet",
"name" -> lift(_.name),
"age" -> lift(_.age),
"weight" -> lift(_.weight)
)

lazy val overwrittenName = lift[Dog](_.name)

implicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](
"Dog",
"bark" -> lift(_ => "woof!"),
"name" -> overwirttenName
).subtypeImpl[Pet]

dog.fields.map{ case (k, _) => k}.mkString_(", ")
// res13: String = "bark, name, age, weight"

// The Dog type has its own implementation of the name field
dog.fields.exists{ case (_, v) => v == overwrittenName }
// res14: Boolean = true

To showcase the inheritance a bit further, consider the following invalid schema.

implicit lazy val pet: Interface[IO, Pet] = interface[IO, Pet](
"Pet",
"name" -> lift(_.name),
"age" -> lift(_.age),
// Notice that weight is abstract
"weight" -> abst[IO, Double]
)

implicit lazy val dog: Type[IO, Dog] = tpe[IO, Dog](
"Dog",
"bark" -> lift(_ => "woof!")
).subtypeImpl[Pet]

// We are missing the weight field
dog.fields.map{ case (k, _) => k}.mkString_(", ")
// res15: String = "bark, name, age"
tip

Schema validation will catch such errors.
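
As a minimal sketch of surfacing the problem (validate on SchemaShape is shown in the schema section; this assumes gql._ is imported):

def incompleteShape = SchemaShape.unit[IO](
  fields("dog" -> lift(_ => Dog("Fido", 2, 10.0)))
)

// should report that Dog does not implement the abstract weight field
incompleteShape.validate.toList.foreach(println)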

Input types​

Review the Input types section for more information.

Other output structures​

Examples of other structures can be in the Output types section.

Covariant effects​

Output types in gql are covariant in F, such that output types written in different effects seamlessly weave together. fs2 provides a type that we can reuse for pure effects defined as type Pure[A] <: Nothing.

With this trick, we can define gql types for trivial cases of our domain:

final case class Entity(
name: String,
age: Int
)

object Entity {
implicit lazy val gqlType: Type[fs2.Pure, Entity] = tpe[fs2.Pure, Entity](
"Entity",
"name" -> lift(_.name),
"age" -> lift(_.age)
)
}

trait Example

tpe[IO, Example](
"Example",
"entity" -> lift(_ => Entity("John Doe", 42))
)

Error handling

There are different types of errors in gql.

  • Schema validation errors, which should be caught in development. These are for instance caused by duplicate field names or invalid typenames.
  • Query preparation errors, which are errors caused by invalid queries.
  • Execution errors. These are errors that occur during query evaluation, caused by resolvers that fail.

Execution​

Error handling in gql can be performed in two ways: errors can be returned explicitly or raised in F.

Examples​

Let's set up the scene:

import gql.ast._
import gql.dsl.all._
import gql.dsl.all.value._
import gql._
import cats.implicits._
import cats.data._
import cats.effect._
import cats.effect.unsafe.implicits.global
import io.circe.syntax._

def multifailSchema =
tpe[IO, Unit](
"Query",
"field" -> build.from(arged(arg[Int]("i", scalar(10))).evalMap{
case 0 => IO.pure(Ior.left("fail gracefully"))
case 1 => IO.raiseError(new Exception("fail hard"))
case i => IO.pure(Ior.right(i))
}.rethrow)
)

def go(query: String, tpe: Type[IO, Unit] = multifailSchema) =
Schema.query(tpe).flatMap { sch =>
Compiler[IO].compile(sch, query) match {
case Left(err) =>
println(err)
IO.pure(err.asJson)
case Right(Application.Query(fa)) =>
fa.map{x => println(x.errors);x.asJson }
}
}.unsafeRunSync()

go("query { field }")
// Chain()
// res0: io.circe.Json = JObject(
// value = object[data -> {
// "field" : 10
// }]
// )

A query can fail gracefully by returning Ior.left:

go("query { field(i: 0) }")
// Chain(Error(Right(fail gracefully),Chain("field")))
// res1: io.circe.Json = JObject(
// value = object[data -> {
// "field" : null
// },errors -> [
// {
// "message" : "fail gracefully",
// "path" : [
// "field"
// ]
// }
// ]]
// )

A query can fail hard by raising an exception:

go("query { field(i: 1) }")
// Chain(Error(Left(java.lang.Exception: fail hard),Chain("field")))
// res2: io.circe.Json = JObject(
// value = object[data -> {
// "field" : null
// },errors -> [
// {
// "message" : "internal error",
// "path" : [
// "field"
// ]
// }
// ]]
// )

A query can also fail before even evaluating the query:

go("query { nonExisting }")
// Preparation(Chain(PositionalError(Cursor(Chain()),List(Caret(0,8,8)),Field 'nonExisting' is not a member of `Query`.)))
// res3: io.circe.Json = JObject(
// value = object[errors -> [
// {
// "message" : "Field 'nonExisting' is not a member of `Query`.",
// "locations" : [
// {
// "line" : 0,
// "column" : 8
// }
// ]
// }
// ]]
// )

And finally, it can fail if it isn't parsable:

def largerQuery = """
query {
field1
field2(test: 42)
}

fragment test on Test {
-value1
value2
}
"""

go(largerQuery)
// Parse(ParseError(Caret(8,4,80),cats.Always@20fed7cb))
// res4: io.circe.Json = JObject(
// value = object[errors -> [
// {
// "message" : "could not parse query",
// "locations" : [
// {
// "line" : 8,
// "column" : 4
// }
// ],
// "error" : "\u001b[34mfailed at offset 80 on line 7 with code 45\none of \"...\"\nin char in range A to Z (code 65 to 90)\nin char in range _ to _ (code 95 to 95)\nin char in range a to z (code 97 to 122)\nfor document:\n\u001b[0m\u001b[32m| \u001b[0m\u001b[32m\n| query {\n| field1\n| field2(test: 42)\n| }\n| \n| fragment test on Test {\n| \u001b[41m\u001b[30m-\u001b[0m\u001b[32mvalue1\n| \u001b[31m>^^^^^^^ line:7, column:4, offset:80, character code code:45\u001b[0m\u001b[32m\n| value2 \n| }\n| \u001b[0m\u001b[0m"
// }
// ]]
// )

Parser errors also look nice in ANSI terminals:

Terminal output

Exception trick​

If for whatever reason you wish to pass information through exceptions, that is also possible:

final case class MyException(msg: String, data: Int) extends Exception(msg)

val res =
Schema.query(
tpe[IO, Unit](
"Query",
"field" -> eff(_ => IO.raiseError[String](MyException("fail hard", 42)))
)
).flatMap { sch =>
Compiler[IO].compile(sch, "query { field } ") match {
case Right(Application.Query(run)) => run
}
}.unsafeRunSync()
// res: QueryResult = QueryResult(
// data = object[field -> null],
// errors = Singleton(
// a = Error(
// error = Left(value = MyException(msg = "fail hard", data = 42)),
// path = Singleton(a = JString(value = "field"))
// )
// )
// )

res.errors.headOption.flatMap(_.error.left.toOption) match {
case Some(MyException(_, data)) => println(s"Got data: $data")
case _ => println("No data")
}
// Got data: 42

    Extending schemas

The AST in gql is subject to extension. In particular, the schema can be used to carry arbitrary information that can later be used for various purposes.

    Integrations that use schema extensions are the goi and relational integrations.

Let's get some imports ready before we start.

    import gql._
    import gql.dsl.all._
    import gql.ast._
    import gql.resolver._
    import cats.effect._
    import cats._
    import cats.data._
    import cats.implicits._

    For this showcase, our goal will be to add authorization to any schema.

    To extend the schema with new attributes we must define what attribute we wish to embed into the schema. We can extend a special trait based on the ast node we wish to extend.

    case class AuthorizedField(
    permissions: List[String]
    ) extends FieldAttribute[fs2.Pure]

    Lets also introduce some functions related to authorization.

    def checkPermissions(token: String, permissions: List[String]): IO[Boolean] = ???

Now we will use our new attribute to create a dsl for our extension.

    def authed[A, B](perms: String*)(field: Field[IO, A, B]): Field[IO, A, B] = {
    val permissions = perms.toList
    field
    .addAttributes(AuthorizedField(permissions))
    .compose(Resolver.id[IO, A].arg(arg[String]("secretToken")).evalMap{ case (token, a) =>
    checkPermissions(token, permissions).map{
    case false => s"your token didn't satisfy the permissions ${permissions.mkString(", ")}".leftIor
    case true => a.rightIor
    }
    }.rethrow)
    .document(s"Requires permissions ${permissions.mkString(", ")}")
    }

    We can now use our authorization function.

    case class Person(name: String, age: Int)
    implicit lazy val person: Type[IO, Person] = tpe[IO, Person](
    "Person",
    "name" -> authed("read:name") {
    lift(_.name)
    },
    "age" -> lift(_.name),
    "name2" -> authed("read:name", "read:name2") {
    authed("read:name") {
lift(_.name)
    }
    },
    )

    Now notice two things:

    1. We forgot to add authorization to the age field.
    2. We added authorization twice to the name2 field by mistake.

    We will catch both of these errors by validating our schema.

    sealed trait Error
    object Error {
    case class MultiplePermissionLists(field: String, perms: List[List[String]]) extends Error {
    override def toString =
    s"Field '$field' has multiple permission lists: ${perms.map(ps => s"{${ps.mkString(",")}}").mkString(", ")}"
    }
    case class MissingPermission(field: String) extends Error {
    override def toString = s"Field '$field' is missing a permission list"
    }
    }

    def validate(schema: SchemaShape[IO, ?, ?, ?]): Chain[Error] = {
    import SchemaShape._
    import VisitNode._
    val fa = schema.visitOnce[Eval, Chain[Error]]{
    case FieldNode(name, f: Field[IO, ?, ?]) =>
    Eval.now {
    f.attributes.collect{ case a: AuthorizedField => a } match {
    case Nil => Chain(Error.MissingPermission(name))
    case a :: Nil => Chain.empty
    case ys => Chain(Error.MultiplePermissionLists(name, ys.map(_.permissions)))
    }
    }
    }

    fa.value
    }

Let's see what happens when we validate our schema.

    lazy val s = SchemaShape.unit[IO](
    fields[IO, Unit](
    "person" -> lift(_ => Person("John", 42))
    )
    )

    validate(s).toList.foreach(println)
    // Field 'person' is missing a permission list
    // Field 'age' is missing a permission list
    // Field 'name2' has multiple permission lists: {read:name}, {read:name,read:name2}

    Notice that the errors we expected were caught by our validation.
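
Running this check when the service boots turns such mistakes into a startup failure rather than a runtime surprise; a minimal sketch (IO.raiseWhen is standard cats-effect):

val validateAtStartup: IO[Unit] = {
  val errs = validate(s)
  IO.raiseWhen(errs.nonEmpty)(new IllegalStateException(errs.toList.mkString("\n")))
}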

The schema

A cyclic type that is not reference equal diverges on every recursive occurrence; solving this would require an infinite amount of time. An example follows:

    final case class A()

    def cyclicType(i: Int): Type[IO, A] = {
    if (i < 10000) tpe[IO, A](
    "A",
    "a" -> lift((_: A) => A())(cyclicType(i + 1))
    )
    else tpe[IO, A](
    "A",
    "a" -> lift(_ => "now I'm a string :)")
    )
    }

    implicit lazy val cyclic: Type[IO, A] = cyclicType(0)

    def recursiveSchema = SchemaShape.unit[IO](fields("a" -> lift(_ => A())))

    recursiveSchema.validate.toList.mkString("\n")
    // res2: String = "Cyclic type `A` is not reference equal. Use lazy val or `cats.Eval` to declare this type. at root.Query.a.A.a.A"

After 10000 iterations the type is no longer unifiable.

    One can also choose to simply ignore some of the validation errors:

    recursiveSchema.validate.filter{
    case Validation.Problem(Validation.Error.CyclicDivergingTypeReference("A"), _) => false
    case _ => true
    }
    // res3: cats.data.Chain[Validation.Problem] = Chain()
    info

Validation does not attempt structural equality, since this can have unforeseen performance consequences.

For instance, if the whole graph was defined with defs, one could very easily accidentally construct a case of exponential running time.

    Schema​

    A Schema is a collection of some components that are required to execute a query. The Schema contains a SchemaShape, a Statistics instance, a query Planner implementation and state regarding BatchResolver implementations and Directives.

    tip

    Check out the statistics section for more information on the Statistics object.

    Also, check out the planning section for more information on how the default query planner works.

    Finally, you can look in the resolver section for more information on BatchResolvers.

The most powerful Schema constructor, stateful, converts a State[SchemaState[F], SchemaShape[F, Q, M, S]] to a Schema[F, Q, M, S].
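
A minimal sketch of that flow, declaring a batcher in State and closing over it in the shape (names are illustrative; this assumes the gql, dsl, resolver and cats-effect imports used throughout these docs, and the exact shape of Schema.stateful, for instance whether the result is wrapped in an effect to allocate Statistics, may differ):

import cats.data.State

implicit lazy val showMissingName = ShowMissingKeys.showForKey[Int]("name not found")

def nameBatcher: State[SchemaState[IO], Resolver[IO, Set[Int], Map[Int, String]]] =
  Resolver.batch[IO, Int, String](keys => IO.pure(keys.map(k => k -> s"name-$k").toMap))

def shapeState: State[SchemaState[IO], SchemaShape[IO, Unit, Unit, Unit]] =
  nameBatcher.map { nb =>
    // `one` embeds the batcher as a singular Int => String step
    SchemaShape.unit[IO](fields("name" -> build.from(Resolver.lift[IO, Unit](_ => 1).andThen(nb.one))))
  }

def mySchema = Schema.stateful(shapeState)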

Input types

Consult the Default values for input objects subsection for more information.

    Args also have an Apply (Applicative without pure) instance defined for them:

    import cats.implicits._

    (arg[Int]("arg1"), arg[Int]("arg2", scalar(43))).mapN(_ + _)

    arg[Int]("arg1") *> arg[Int]("arg2", scalar(44))

    Args can naturally be used in field definitions:

    import cats._
    import cats.effect._

    final case class Data(str: String)

    tpe[IO, Data](
    "Something",
    "field" ->
    lift(arg[String]("arg1", scalar("default"))){ case (arg1, data) =>
    data.str + arg1
    }
    )

    Input​

    An input consists of a name along with some fields. It turns out that arguments and fields have the same properties and as such, Arg is used for fields.

    final case class InputData(
    name: String,
    age: Int
    )

    input[InputData](
    "InputData",
    (
    arg[String]("name"),
    arg[Int]("age", scalar(42))
    ).mapN(InputData.apply)
    )
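
Once an Input instance for InputData is implicitly in scope, the input object can be requested like any other argument. A minimal sketch (assuming the input[InputData] definition above is bound as an implicit value; the field is illustrative):

tpe[IO, Unit](
  "Mutation",
  "register" -> lift(arg[InputData]("data")) { case (data, _) =>
    s"${data.name} is ${data.age} years old"
  }
)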

    Default values for input objects​

For input objects, however, a default value cannot be properly type checked at compile time, since the default value might be partial. For instance, consider the following input type.

    final case class SomeInput(
    a: Int,
    b: String,
    c: Seq[Int],
    d: Option[Int]
    )

    implicit lazy val someInput: Input[SomeInput] = input[SomeInput](
    "SomeInput",
    (
    arg[Int]("a", scalar(42)),
    arg[String]("b"),
    arg[Seq[Int]]("c", arr(scalar(1), scalar(2), scalar(3))),
    arg[Option[Int]]("d", scalar(42))
    ).mapN(SomeInput.apply)
    )

    Two valid uses of this type could for instance be:

    arg[SomeInput](
    "someInput1",
    obj(
    "a" -> scalar(42),
    "b" -> scalar("hello1"),
    "c" -> arr(Seq(1, 2, 3).map(scalar(_)): _*)
    )
    )

    arg[SomeInput](
    "someInput2",
    obj(
    "b" -> scalar("hello2"),
    "d" -> nullValue
    )
    )

    Input validation​

A function emap exists on arg, which maps the input to Either[String, B] for some B.

    import cats.data._

    final case class ValidatedInput(
    a: Int,
    b: NonEmptyList[Int]
    )

    input[ValidatedInput](
    "ValidatedInput",
    (
    arg[Int]("a", scalar(42), "May not be negative")
    .emap(i => if (i < 0) s"Negative value: $i".asLeft else i.asRight),

    arg[Seq[Int]]("b", arr(scalar(1), scalar(2), scalar(3)), "NonEmpty")
    .emap(xs => xs.toList.toNel.toRight("Input is empty.")),

    ).mapN(ValidatedInput.apply)
    .emap(v => if (v.a > v.b.combineAll) "a must be larger than the sum of bs".asLeft else v.asRight)
    ).document("The field `a` must be larger than the sum of `b`.")

Output types

This also means that even if you have a type declared, it must occur in the ast to be respected.

You might want to declare types that are not yet queryable. Or maybe you only expose an interface, but there are no reachable references to any implementing types, thus the implementations won't be discovered.

    The schema lets you declare "extra" types that should occur in introspection, rendering and evaluation:

    def getNode: Node = Company("gql", "1")

    def shape = SchemaShape.unit[IO](fields("node" -> lift(_ => getNode)))

    println(shape.render)
    // type Query {
    // node: Node!
    // }
    //
    // interface Node {
    // id: ID!
    // }

    def withCompany = shape.addOutputTypes(company)

    println(withCompany.render)
    // type Company implements Node {
    // name: String!
    // id: ID!
    // }
    //
    // interface Node {
    // id: ID!
    // }
    //
    // type Query {
    // node: Node!
    // }

    println(withCompany.addOutputTypes(person).render)
    // type Company implements Node {
    // name: String!
    // id: ID!
    // }
    //
    // interface Node {
    // id: ID!
    // }
    //
    // type Query {
    // node: Node!
    // }
    //
    // type Person implements Node {
    // name: String!
    // id: ID!
    // }

    Variance of the Out type​

The Out[F[_], A] is invariant in A. It might seem convenient to let A be contravariant (-A), but this causes ambiguity when trying to find implicits/givens.

    trait Typeclass[-A]

    trait Animal
    trait Dog extends Animal

    implicit object AnimalTC extends Typeclass[Animal]
    implicit object DogTC extends Typeclass[Dog]

    implicitly[Typeclass[Dog]]
    // error: ambiguous implicit values:
    // both object DogTC of type DogTC.type
    // and object AnimalTC of type AnimalTC.type
    // match expected type Typeclass[Dog]
    // implicitly[Typeclass[Dog]]
    // ^^^^^^^^^^^^^^^^^^^^^^^^^^

    Resolvers

Resolvers are at the core of gql; a resolver Resolver[F, I, O] takes an I and produces an O in effect F. Resolvers are embedded in fields and act as continuations. When gql executes a query it first constructs a tree of continuations from your schema and the supplied GraphQL query.

    Resolvers act and compose like functions with combinators such as andThen and compose.

    tip

    Resolver forms an Arrow and Choice.

Let's start off with some imports:

    import gql._
    import gql.dsl.all._
    import gql.resolver._
    import gql.ast._
    import cats.effect._
    import cats.implicits._
    import cats.data._
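
As a quick taste of the function-like composition mentioned above, two lifted steps chain with andThen (a minimal sketch; the lift constructor is introduced just below):

val toLong = Resolver.lift[IO, Int](_.toLong)
val render = Resolver.lift[IO, Long](l => s"got $l")
val composed: Resolver[IO, Int, String] = toLong.andThen(render)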

    Resolvers​

    Resolver is a collection of high-level combinators that constructs a tree of Step.

    note

    If you are familiar with the relationship between fs2.Stream and fs2.Pull, then the relationship between Resolver and Step should be familiar.

    Lift​

Resolver.lift lifts a function I => O into Resolver[F, I, O]. lift's method form is map, which for any resolver Resolver[F, I, O] produces a new resolver Resolver[F, I, O2] given a function O => O2.

    val r = Resolver.lift[IO, Int](_.toLong)
    // r: Resolver[IO, Int, Long] = gql.resolver.Resolver@490ce5f5
    r.map(_.toString())
    // res0: Resolver[IO, Int, String] = gql.resolver.Resolver@b69376a

    Effect​

effect, like lift, lifts a function, but an effectful one I => F[O], into Resolver[F, I, O]. effect's method form is evalMap (like Resource and fs2.Stream).

    val r = Resolver.effect[IO, Int](i => IO(i.toLong))
    // r: Resolver[IO, Int, Long] = gql.resolver.Resolver@7475236f
    r.evalMap(l => IO(l.toString()))
    // res1: Resolver[[x]IO[x], Int, String] = gql.resolver.Resolver@16a819c9

    Arguments​

Arguments in gql are provided through resolvers. A resolver Resolver[F, I, A] can be constructed from an argument Arg[A], through either argument or arg in method form.

    lazy val ageArg = arg[Int]("age")
    val r = Resolver.argument[IO, Nothing, String](arg[String]("name"))
    // r: Resolver[IO, Nothing, String] = gql.resolver.Resolver@38a575a4
    val r2 = r.arg(ageArg)
    // r2: Resolver[IO, Nothing, (Int, String)] = gql.resolver.Resolver@7ab5c7bd
    r2.map{ case (age, name) => s"$name is $age years old" }
    // res2: Resolver[IO, Nothing, String] = gql.resolver.Resolver@2786eddf

    Arg also has an applicative defined for it, so multi-argument resolution can be simplified to.

    val r = Resolver.argument[IO, Nothing, (String, Int)](
    (arg[String]("name"), arg[Int]("age")).tupled
    )
    // r: Resolver[IO, Nothing, (String, Int)] = gql.resolver.Resolver@71b6ceb7
    r.map{ case (age, name) => s"$name is $age years old" }
    // res3: Resolver[IO, Nothing, String] = gql.resolver.Resolver@62ad8d70

    Meta​

    The meta resolver provides metadata regarding query execution, such as the position of query execution, field aliasing and the provided arguments.

It also allows the caller to inspect the query ast such that more exotic operations become possible. For instance, arguments can be inspected dynamically.

    lazy val a = arg[Int]("age")
    Resolver.meta[IO, String].map(meta => meta.astNode.arg(a))
    // res4: Resolver[IO, String, Option[Int]] = gql.resolver.Resolver@367136b7

    The relational integration makes heavy use of this feature.

    Errors​

    Errors are reported in cats.data.Ior.

    info

    An Ior is a non-exclusive Either.

The Ior datatype's left side must be String and acts as an optional error that will be present in the query result. gql can return an error and a result for the same path, given that Ior has both its left and right side defined.

Errors are embedded into resolvers via rethrow. The extension method rethrow is present on any resolver of type Resolver[F, I, Ior[String, O]]:

    val r = Resolver.lift[IO, Int](i => Ior.Both("I will be in the errors :)", i))
    // r: Resolver[IO, Int, Ior.Both[String, Int]] = gql.resolver.Resolver@2609ddb3
    r.rethrow
    // res5: Resolver[[A]IO[A], Int, Int] = gql.resolver.Resolver@618d6cb3

    We can also use emap to map the current value into an Ior.

    val r = Resolver.id[IO, Int].emap(i => Ior.Both("I will be in the errors :)", i))
    // r: Resolver[IO, Int, Int] = gql.resolver.Resolver@6fc5abff

    First​

Resolver also implements first (Resolver[F, A, B] => Resolver[F, (A, C), (B, C)]), which can be convenient for situations where one would usually have to trace a value through an entire computation.

    Since a Resolver does not form a Monad, first is necessary to implement non-trivial resolver compositions.

For instance, maybe your program contains a general resolver composition that is used in many places, like say verifying credentials, but you'd like to trace a value through it without having to keep track of tupling output with input.

Assume we'd like to implement a resolver that, when given a person's name, can get a list of the person's friends.

    case class PersonId(value: Int)

    case class Person(id: PersonId, name: String)

    def getFriends(id: PersonId, limit: Int): IO[List[Person]] = ???

    def getPerson(name: String): IO[Person] = ???

    def getPersonResolver = Resolver.effect[IO, String](getPerson)

    def limitResolver = Resolver.argument[IO, Person, Int](arg[Int]("limit"))

    def limitArg = arg[Int]("limit")
    getPersonResolver
    // 'arg' tuples the input with the argument value
    .arg(limitArg)
    .evalMap{ case (limit, p) => getFriends(p.id, limit) }
    // res6: Resolver[[x]IO[x], String, List[Person]] = gql.resolver.Resolver@503efc3e

    Batch​

    Like most other GraphQL implementations, gql also supports batching.

    Unlike most other GraphQL implementations, gql's batching implementation features a global query planner that lets gql delay field execution until it can be paired with another field.

    Batch declaration and usage occurs as follows:

    • Declare a function Set[K] => F[Map[K, V]].
    • Give this function to gql and get back a Resolver[F, Set[K], Map[K, V]] in a State monad (for unique id generation).
    • Use this new resolver where you want batching.

    And now put into practice:

    def getPeopleFromDB(ids: Set[PersonId]): IO[List[Person]] = ???

    Resolver.batch[IO, PersonId, Person]{ keys =>
    getPeopleFromDB(keys).map(_.map(x => x.id -> x).toMap)
    }
    // res7: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@38b247cf

    Whenever gql sees this resolver in any composition, it will look for similar resolvers during query planning.

Note, however, that you should only declare each batch resolver variant once; that is, you should build your schema in State. gql considers different batch instantiations incompatible, regardless of any type information.

State has Monad (and transitively Applicative) defined for it, so it composes well. Here is an example of multiple batchers:

    def b1 = Resolver.batch[IO, Int, Person](_ => ???)
    def b2 = Resolver.batch[IO, Int, String](_ => ???)

    (b1, b2).tupled
    // res8: State[SchemaState[IO], (Resolver[IO, Set[Int], Map[Int, Person]], Resolver[IO, Set[Int], Map[Int, String]])] = cats.data.IndexedStateT@32504c9e
    tip

    Even if your field doesn't benefit from batching, batching can still do duplicate key elimination.

    Batch resolver syntax​

When a resolver has the very specific form Resolver[F, Set[K], Map[K, V]], the gql dsl provides some helper methods. For instance, a batcher may be embedded in a singular context (K => V). Here is a showcase of some of the helper methods:

    def pb: Resolver[IO, Set[Int], Map[Int, Person]] = 
    // Stub implementation
    Resolver.lift(_ => Map.empty)

    // None if a key is missing
    pb.all[List]
    // res9: Resolver[[A]IO[A], List[Int], List[Option[Person]]] = gql.resolver.Resolver@70089378

    // Every key must have an associated value
    // or else raise an error via a custom show-like typeclass
    implicit lazy val showMissingPersonId =
    ShowMissingKeys.showForKey[Int]("not all people could be found")
    pb.traversable[List]
    // res10: Resolver[[A]IO[A], List[Int], List[Person]] = gql.resolver.Resolver@57f04e4b

    // Maybe there is one value for one key?
    pb.opt
    // res11: Resolver[[A]IO[A], Int, Option[Person]] = gql.resolver.Resolver@7d56f745

    // Same as opt
    pb.all[cats.Id]
    // res12: Resolver[[A]IO[A], cats.package.Id[Int], cats.package.Id[Option[Person]]] = gql.resolver.Resolver@4d5270cd

    // There is always one value for one key
    pb.one
    // res13: Resolver[[A]IO[A], Int, Person] = gql.resolver.Resolver@79f5b94c

    // You can be more explicit via the `batch` method
    pb.batch.all[NonEmptyList]
    // res14: Resolver[[A]IO[A], NonEmptyList[Int], NonEmptyList[Option[Person]]] = gql.resolver.Resolver@2ca2222f

Using batch helps the compiler produce better error messages.

    Resolver.lift[IO, Int](_.toString()).batch.all
    // error: Cannot prove that Set[K] =:= Int.
    // Resolver.lift[IO, Int](_.toString()).batch.all
    // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    tip

    For larger programs, consider declaring all your batchers up-front and putting them into some type of collection:

    case class MyBatchers(
    personBatcher: Resolver[IO, Set[Int], Map[Int, Person]],
    intStringBatcher: Resolver[IO, Set[Int], Map[Int, String]]
    )

    (b1, b2).mapN(MyBatchers.apply)
    // res16: State[SchemaState[IO], MyBatchers] = cats.data.IndexedStateT@386b52ad

For most batchers it is likely that you eventually want to pre-compose them in various ways, for instance requesting args, which this pattern promotes.

    tip

Sometimes you have multiple groups of fields in the same object where each group has different performance overheads.

Say you had a Person object in your database. This Person object also exists in a remote api. This remote api can tell you the friends of a Person, given the object's id and name. Written out a bit more structured, we have:

    • PersonId => PersonId (identity)
    • PersonId => PersonDB (database query)
    • PersonDB => PersonRemoteAPI (remote api call)
    • PersonId => PersonRemoteAPI (composition of database query and remote api call)

    And now put into code:

// We have a trivial id field for our person id
def pureFields = fields[IO, PersonId](
  "id" -> lift(id => id)
)

// If we query our database with a person id, we get a person database object
case class PersonDB(
  id: PersonId,
  name: String,
  remoteApiId: String
)

// SELECT id, name, remote_api_id FROM person WHERE id in (...)
def dbBatchResolver: Resolver[IO, PersonId, PersonDB] = ???

// From the db we can get the name and the remote api id
def dbFields = fields[IO, PersonDB](
  "name" -> lift(_.name),
  "apiId" -> lift(_.remoteApiId)
)

// The remote api data can be found given the result of a db query
case class PersonRemoteAPI(
  id: PersonId,
  friends: List[PersonId]
)

// Given a PersonDB we can call the api (via a batched GET or something)
def personBatchResolver: Resolver[IO, PersonDB, PersonRemoteAPI] = ???

// We can get the friends from the remote api
def remoteApiFields = fields[IO, PersonRemoteAPI](
  "friends" -> lift(_.friends)
)

// Now we can start composing our fields
// We can align the types of the db and remote api data to the PersonDB type
// by composing the remote api resolver on the remote api fields
def dbFields2: Fields[IO, PersonDB] =
  remoteApiFields.compose(personBatchResolver) ::: dbFields

// Given a PersonId we have every field
// If "friends" is selected, gql will first run `dbBatchResolver` and then `personBatchResolver`
def allFields = dbFields2.compose(dbBatchResolver) ::: pureFields

implicit def person: Type[IO, PersonId] = tpeNel[IO, PersonId](
  "Person",
  allFields
)

The general pattern for this decomposition revolves around figuring out what the most basic description of your object is.
In this example, every field can (eventually through various side-effects) be resolved from just PersonId.

    Batchers from elsewhere​

    Most batching implementations have compatible signatures and can be adapted into a gql batcher.

    For instance, converting fetch to gql:

import fetch._

object People extends Data[PersonId, Person] {
  def name = "People"

  def source: DataSource[IO, PersonId, Person] = ???
}

Resolver
  .batch[IO, PersonId, Person](_.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty)))
// res17: State[SchemaState[IO], Resolver[IO, Set[PersonId], Map[PersonId, Person]]] = cats.data.IndexedStateT@1b284b1b

    Inline batch​

    A batch resolver can also be defined inline with some notable differences to the regular batch resolver:

    • It does not need to be defined in state.
    • It is not subject to global query planning, and is only ever called with inputs from the same selection.

The inline batch resolver has the same signature as a regular batch resolver: Set[K] => F[Map[K, V]].

Resolver.inlineBatch[IO, PersonId, Person](
  _.toList.toNel.traverse(People.source.batch).map(_.getOrElse(Map.empty))
)
// res18: Resolver[IO, Set[PersonId], Map[PersonId, Person]] = gql.resolver.Resolver@64d638e3

    Choice​

    Resolvers also implement Choice via (Resolver[F, A, C], Resolver[F, B, D]) => Resolver[F, Either[A, B], Either[C, D]]. On the surface, this combinator may have limited uses, but with a bit of composition we can perform tasks such as caching.

For instance, a combinator derived from Choice is skippable: Resolver[F, I, O] => Resolver[F, Either[I, O], O], which acts as a variant of "caching". If the right side is present, we skip the underlying resolver (Resolver[F, I, O]) altogether.
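As a small sketch of skippable in use (lookupPerson here is a hypothetical resolver, not something defined by gql):

// A sketch: lookupPerson stands in for any expensive lookup.
def lookupPerson: Resolver[IO, PersonId, Person] = ???

// If the input is Right(person), lookupPerson is never run.
def maybeCached: Resolver[IO, Either[PersonId, Person], Person] =
  lookupPerson.skippable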

For any resolver of the form Resolver[F, I, Either[L, R]], we can modify the left side with leftThrough and the right side with rightThrough.

For instance, we can implement caching.

def getPersonForId(id: PersonId): IO[Person] = ???

type CachedPerson = Either[PersonId, Person]

def cachedPerson = tpe[IO, CachedPerson](
  "Person",
  "id" -> lift(_.map(_.id).merge.value),
  // We'll align the left and right side of the choice and then merge the `Either`
  "name" -> build[IO, CachedPerson](_.leftThrough(_.evalMap(getPersonForId)).map(_.merge.name))
)

    We can also use some of the compose tricks from the batch resolver syntax section if we have a lot of fields that depend on Person.
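For instance, here is a sketch of aligning a whole group of Person fields to CachedPerson in one place; it assumes an identity constructor such as Resolver.id exists, so treat it as an outline rather than a definitive recipe:

// A sketch: resolve the cache choice once, then reuse it for many fields
def personFields: Fields[IO, Person] =
  fields("name" -> lift(_.name))

// assumed: Resolver.id is an identity resolver for CachedPerson
def resolveCache: Resolver[IO, CachedPerson, Person] =
  Resolver.id[IO, CachedPerson].leftThrough(_.evalMap(getPersonForId)).map(_.merge)

def cachedPersonFields: Fields[IO, CachedPerson] =
  personFields.compose(resolveCache)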

    note

    The query planner treats the choice branches as parallel, such that for two instances of a choice, resolvers in the two branches may be batched together.

    Stream​

    The stream resolver embeds an fs2.Stream and provides the ability to emit a stream of results for a graphql subscription.

    Stream semantics​

    • When one or more streams emit, the interpreter will re-evaluate the query from the position that emitted. That is, only the sub-tree that changed will be re-interpreted.
    • If two streams emit and one occurs as a child of the other, the child will be ignored since it will be replaced.
    • By default, the interpreter will only respect the most-recent emitted data.

This means that by default, gql assumes that your stream should behave like a signal, not sequentially.

This means that gql must be able to pull one element before closing the old one.
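To get a feel for the signal semantics before the larger example below, here is a minimal sketch; the ticks field and its stream are made up for illustration:

import scala.concurrent.duration._

// A sketch: a field backed by a stream. gql re-evaluates the sub-tree
// under "ticks" whenever the stream emits, keeping only the latest element.
def tickField = "ticks" -> build[IO, Unit](_.streamMap { _ =>
  fs2.Stream.iterate(0)(_ + 1).covary[IO].metered(1.second)
})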

    tip

If you have streams of updates where you are only interested in that something changed (Stream[F, Unit]) there may be room for significant optimization. In fs2 you can merge streams with combinators such as parJoin, but they have to assume that there may be resources to account for. If you are discarding the output of the stream or you are absolutely sure that the output does not depend on a resource lifetime,
one can write more optimized functions for this purpose.

    Some examples of potentially more performant implementations

In crude benchmarks, these combinators may perform an order of magnitude faster than parJoin or merge.

import fs2.{Pipe, Stream}
import fs2.concurrent._

def parListen[A]: Pipe[IO, Stream[IO, A], Unit] =
  streams =>
    for {
      d <- Stream.eval(IO.deferred[Either[Throwable, Unit]])
      c <- Stream.eval(IO.deferred[Unit])
      sigRef <- Stream.eval(SignallingRef[IO, Unit](()))

      bg = streams.flatMap { sub =>
        Stream.supervise {
          sub
            .evalMap(_ => sigRef.set(()))
            .compile
            .drain
            .onError(e => d.complete(Left(e)).void)
            .onCancel(c.complete(()).void)
        }.void
      }

      listenCancel = (c.get *> IO.canceled).as(Right(()): Either[Throwable, Unit])
      fg = sigRef.discrete.interruptWhen(d).interruptWhen(listenCancel)

      _ <- fg.concurrently(bg)
    } yield ()

def parListenSignal[A]: Pipe[IO, Stream[IO, A], A] =
  streams =>
    Stream.eval(SignallingRef.of[IO, Option[A]](None)).flatMap { sig =>
      sig.discrete.unNone.concurrently {
        streams.parEvalMapUnorderedUnbounded { x =>
          x.evalMap(x => sig.set(Some(x))).compile.drain
        }
      }
    }

    Here is an example of some streams in action:

import scala.concurrent.duration._
import cats.effect.unsafe.implicits.global

case class Streamed(value: Int)

implicit lazy val streamed: Type[IO, Streamed] = tpe[IO, Streamed](
  "Streamed",
  "value" -> build[IO, Streamed](_.streamMap { s =>
    fs2.Stream
      .bracket(IO(println(s"allocating $s")))(_ => IO(println(s"releasing $s"))) >>
      fs2.Stream
        .iterate(0)(_ + 1)
        .evalTap(n => IO(println(s"emitting $n for $s")))
        .meteredStartImmediately(((5 - s.value) * 20).millis)
        .as(Streamed(s.value + 1))
  })
)

    def query = """
    subscription {
    streamed {
    value {
    value {
    value {
    __typename
    }
    }
    }
    }
    }
    """

def schema = SchemaShape.unit[IO](
  fields("ping" -> lift(_ => "pong")),
  subscription = Some(fields("streamed" -> lift(_ => Streamed(0))))
)

Schema.simple(schema)
  .map(Compiler[IO].compile(_, query))
  .flatMap { case Right(Application.Subscription(stream)) => stream.take(4).compile.drain }
  .unsafeRunSync()
    // allocating Streamed(0)
    // emitting 0 for Streamed(0)
    // allocating Streamed(1)
    // emitting 0 for Streamed(1)
    // allocating Streamed(2)
    // emitting 0 for Streamed(2)
    // emitting 1 for Streamed(2)
    // emitting 1 for Streamed(1)
    // emitting 1 for Streamed(0)
    // allocating Streamed(2)
    // allocating Streamed(1)
    // emitting 0 for Streamed(1)
    // emitting 0 for Streamed(2)
    // allocating Streamed(2)
    // emitting 0 for Streamed(2)
    // emitting 2 for Streamed(2)
    // emitting 2 for Streamed(1)
    // emitting 1 for Streamed(2)
    // allocating Streamed(2)
    // emitting 1 for Streamed(2)
    // emitting 0 for Streamed(2)
    // emitting 2 for Streamed(0)
    // releasing Streamed(1)
    // emitting 3 for Streamed(2)
    // releasing Streamed(2)
    // releasing Streamed(2)
    // releasing Streamed(0)
    // releasing Streamed(2)
    // releasing Streamed(2)
    // releasing Streamed(1)

    gql also allows the user to specify how much time the interpreter may await more stream updates:

    Schema.simple(schema).map(Compiler[IO].compile(_, query, accumulate=Some(10.millis)))

Furthermore, gql can also emit interpreter information if you want to look into what gql is doing:

Schema.simple(schema)
  .map(Compiler[IO].compile(_, query, debug = gql.server.interpreter.DebugPrinter[IO](s => IO(println(s)))))
  .flatMap { case Right(Application.Subscription(stream)) => stream.take(3).compile.drain }
  .unsafeRunSync()
    // allocating Streamed(0)
    // emitting 0 for Streamed(0)
    // publishing at index 0 at root.streamed.value
    // allocating Streamed(1)
    // emitting 0 for Streamed(1)
    // publishing at index 0 at root.streamed.value.value
    // allocating Streamed(2)
    // emitting 0 for Streamed(2)
    // publishing at index 0 at root.streamed.value.value.value
    // unconsing with current tree:
    // |- unknown-cats.effect.kernel.Unique$Token@5085ad30
    // got state, awaiting a non-empty state (publication)
    // emitting 1 for Streamed(2)
    // publishing at index 1 at root.streamed.value.value.value
    // done publishing at index 1 at root.streamed.value.value.value, await? true
    // got non-empty state, awaiting 5 milliseconds
    // unconsed:
    // [
    // ResourceInfo(
    // parentName = root.streamed.value.value.value (signal = true),
    // name = resource-1,
    // open = true,
    // value = StreamData(
    // cont = Continuation.Done(
    // Selection(
    // PreparedSpecification(
    // typename = Streamed,
    // selections = PreparedSelections{
    // PreparedDataField(
    // name = __typename,
    // alias = None,
    // cont = PreparedCont(
    // edges = Lift(...),
    // cont = PreparedLeaf(String)
    // )
    // )
    // }
    // )
    // )
    // ),
    // value = Right(repl.MdocSession$MdocApp$Streamed$1)
    // )
    // )
    // ]
    // emitting 1 for Streamed(1)
    // publishing at index 1 at root.streamed.value.value
    // done publishing at index 1 at root.streamed.value.value, await? true
    // unconsed after removing old children:
    // [
    // ResourceInfo(
    // parentName = root.streamed.value.value.value (signal = true),
    // name = resource-1,
    // open = true,
    // value = ditto
    // )
    // ]
    // tree after unconsing:
    // |- unknown-cats.effect.kernel.Unique$Token@5085ad30
    // emitting 1 elements from uncons
    // interpreting for 1 inputs
    // done interpreting
    // unconsing with current tree:
    // |- unknown-cats.effect.kernel.Unique$Token@5085ad30
    // got state, awaiting a non-empty state (publication)
    // got non-empty state, awaiting 5 milliseconds
    // emitting 1 for Streamed(0)
    // publishing at index 1 at root.streamed.value
    // done publishing at index 1 at root.streamed.value, await? true
    // unconsed:
    // [
    // ResourceInfo(
    // parentName = root.streamed.value.value (signal = true),
    // name = resource-1,
    // open = true,
    // value = StreamData(
    // cont = Continuation.Done(
    // Selection(
    // PreparedSpecification(
    // typename = Streamed,
    // selections = PreparedSelections{
    // PreparedDataField(
    // name = value,
    // alias = None,
    // cont = PreparedCont(
    // edges = Compose(
    // left = Compose(left = Lift(...), right = Lift(...)),
    // right = EmbedStream(signal = true)
    // ),
    // cont = Selection(
    // PreparedSpecification(
    // typename = Streamed,
    // selections = PreparedSelections{
    // PreparedDataField(
    // name = __typename,
    // alias = None,
    // cont = PreparedCont(
    // edges = Lift(...),
    // cont = PreparedLeaf(String)
    // )
    // )
    // }
    // )
    // )
    // )
    // )
    // }
    // )
    // )
    // ),
    // value = Right(repl.MdocSession$MdocApp$Streamed$1)
    // )
    // )
    // ]
    // unconsed after removing old children:
    // [
    // ResourceInfo(
    // parentName = root.streamed.value.value (signal = true),
    // name = resource-1,
    // open = true,
    // value = ditto
    // )
    // ]
    // tree after unconsing:
    // |- unknown-cats.effect.kernel.Unique$Token@5085ad30
    // emitting 1 elements from uncons
    // interpreting for 1 inputs
    // allocating Streamed(2)
    // emitting 0 for Streamed(2)
    // publishing at index 0 at root.streamed.value.value.value
    // done interpreting
    // releasing Streamed(0)
    // releasing Streamed(1)
    // releasing Streamed(2)
    // releasing Streamed(2)

    Steps​

A Step is the low-level algebra for a resolver; it describes a single step of evaluation for a query. The variants of Step are clearly listed in the source code. All variants of Step provide orthogonal properties.
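To give a feel for the shape of such an algebra, here is a simplified sketch; the constructors below are illustrative and not gql's actual definitions, though the debug output above hints at real variants such as Lift, Compose and EmbedStream:

// Simplified sketch, not gql's actual Step ADT.
// Each constructor captures one orthogonal capability of a resolver.
sealed trait Step[F[_], -I, +O]
object Step {
  // pure function application
  final case class Lift[F[_], I, O](f: I => O) extends Step[F, I, O]
  // effectful evaluation
  final case class EmbedEffect[F[_], I, O](f: I => F[O]) extends Step[F, I, O]
  // sequential composition of two steps
  final case class Compose[F[_], I, A, O](left: Step[F, I, A], right: Step[F, A, O]) extends Step[F, I, O]
  // embedding a stream of results (signal semantics)
  final case class EmbedStream[F[_], I, O](f: I => fs2.Stream[F, O]) extends Step[F, I, O]
}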

diff --git a/docs/server/schema/structuring_apps/index.html b/docs/server/schema/structuring_apps/index.html
index 3e367829..a89f3e94 100644
--- a/docs/server/schema/structuring_apps/index.html
+++ b/docs/server/schema/structuring_apps/index.html

    Structuring large applications

    The documentation explores smaller examples. To host larger graphs there are some considerations that must be addressed.

    • What up-front work can be done to minimize the overhead in introducing new types.
    • How is (mutual) recursion handled between different domains.

Recursive datatypes are notoriously difficult to deal with.
In functional programming, laziness is often exploited as a solution to introduce cyclic data, but it can easily introduce accidental infinite recursion.

Separating domains​

    Partially applying all needed dependencies can be expressed with a class.

import cats.effect._
import gql._
import gql.ast._
import gql.dsl._

final case class Organization(
  id: String,
  name: String
)

final case class User(
  id: String,
  name: String,
  organizationId: String
)

trait Repo {
  def getUser(id: String): IO[User]
  def getOrganization(id: String): IO[Organization]
  def getOrganizationUsers(organizationId: String): IO[List[User]]
}

class UserTypes(repo: Repo) {
  // notice how we bind the effect (IO) so that we can omit this parameter in the dsl
  val dsl = new GqlDsl[IO] {}
  import dsl._

  implicit val organization: Type[IO, Organization] =
    tpe[Organization](
      "Organization",
      "id" -> lift(_.id),
      "name" -> lift(_.name),
      "users" -> eff(x => repo.getOrganizationUsers(x.id))
    )

  implicit val user: Type[IO, User] =
    tpe[User](
      "User",
      "id" -> lift(_.id),
      "name" -> lift(_.name),
      "organization" -> eff(x => repo.getOrganization(x.organizationId))
    )
}
You can also extend the dsl if you prefer a more object-oriented style.
class UserTypes(repo: Repo) extends GqlDsl[IO] {
  // ...
}

    Mutually recursive domains​

Subgraphs can be neatly packaged into classes, but that does not address the issue of recursion between different domains.

    Call by name constructor parameters​

    A compositional approach is to use call by name constructor parameters to lazily pass mutually recursive dependencies.

class UserTypes(paymentTypes: => PaymentTypes) {
  lazy val p = paymentTypes
  import p._
  // ...
}

class PaymentTypes(userTypes: => UserTypes) {
  lazy val u = userTypes
  import u._
  // ...
}

lazy val userTypes: UserTypes = new UserTypes(paymentTypes)
lazy val paymentTypes: PaymentTypes = new PaymentTypes(userTypes)
    tip

When domain types are defined in separate projects, OOP interfaces can be used to implement mutual recursion.

// core project
trait User
trait UserTypes {
  // we can also choose to only expose the datatypes that are necessary
  implicit def userType: Type[IO, User]
}
trait Payment
trait PaymentTypes {
  implicit def paymentType: Type[IO, Payment]
}

// user project
class UserTypesImpl(paymentTypes: => PaymentTypes) extends UserTypes {
  lazy val p = paymentTypes
  import p._
  def userType: Type[IO, User] = ???
}

// payment project
class PaymentTypesImpl(userTypes: => UserTypes) extends PaymentTypes {
  lazy val u = userTypes
  import u._
  def paymentType: Type[IO, Payment] = ???
}

// main project
lazy val userTypes: UserTypes = new UserTypesImpl(paymentTypes)
lazy val paymentTypes: PaymentTypes = new PaymentTypesImpl(userTypes)

    Cake​

    The cake pattern can also be used to define mutually recursive dependencies, at the cost of composability.

// core project
trait User
trait UserTypes {
  // we can also choose to only expose the datatypes that are necessary
  implicit def userType: Type[IO, User]
}
trait Payment
trait PaymentTypes {
  implicit def paymentType: Type[IO, Payment]
}

// user project
trait UserTypesImpl extends UserTypes { self: PaymentTypes =>
  import self._
  def userType: Type[IO, User] = ???
}

// payment project
trait PaymentTypesImpl extends PaymentTypes { self: UserTypes =>
  import self._
  def paymentType: Type[IO, Payment] = ???
}

// main project
val allTypes = new UserTypesImpl with PaymentTypesImpl { }
// allTypes: AnyRef with UserTypesImpl with PaymentTypesImpl = repl.MdocSession$MdocApp$$anon$2@3f7e25e7
diff --git a/docs/tutorial/index.html b/docs/tutorial/index.html
index dc9875ec..be5bf202 100644
--- a/docs/tutorial/index.html
+++ b/docs/tutorial/index.html

This field is available on all types and interfaces.
• The ... on syntax is used to pattern match on specific types. Since the hero field returns a Character interface, we must match it to a Droid to get the primaryFunction field.
  • The fragment syntax is used to define a reusable block of fields akin to a CTE in SQL.
Now let us introduce the query in Scala:

    def query = """
    query {
    hero(episode: NEWHOPE) {
    id
    name
    __typename
    ... on Droid {
    primaryFunction
    friends {
    name
    __typename
    appearsIn
    }
    }
    ... HumanDetails
    }
    c3po: droid(id: "2000") {
    name
    }
    }

    fragment HumanDetails on Human {
    homePlanet
    }
    """

    Finally we can parse, plan and evaluate the query:

import io.circe.syntax._

(new StarWarsSchema[IO](repo))
  .makeSchema
  .map(Compiler[IO].compile(_, query))
  .flatMap { case Right(Application.Query(run)) => run.map(_.asJson) }
    // {
    // "data" : {
    // "c3po" : {
    // "name" : "C-3PO"
    // },
    // "hero" : {
    // "name" : "R2-D2",
    // "__typename" : "Droid",
    // "primaryFunction" : "Astromech",
    // "id" : "2001",
    // "friends" : [
    // {
    // "__typename" : "Human",
    // "appearsIn" : [
    // "NEWHOPE",
    // "EMPIRE",
    // "JEDI"
    // ],
    // "name" : "Luke Skywalker"
    // },
    // {
    // "__typename" : "Human",
    // "appearsIn" : [
    // "NEWHOPE",
    // "EMPIRE",
    // "JEDI"
    // ],
    // "name" : "Han Solo"
    // },
    // {
    // "__typename" : "Human",
    // "appearsIn" : [
    // "NEWHOPE",
    // "EMPIRE",
    // "JEDI"
    // ],
    // "name" : "Leia Organa"
    // }
    // ]
    // }
    // }
    // }

    And that's the end of this tutorial! The docs contain more examples and information about the library, so be sure to check them out.

diff --git a/index.html b/index.html
index 0868ebb6..724c32c1 100644
--- a/index.html
+++ b/index.html

    gql

    A functional server and client GraphQL implementation for Scala

    Powerful algebraic resolvers

    "friends" -> resolve(_
    .evalMap(getFriends)
    .streamMap(is => peopleEvents(is.map(_.id)))
    .rethrow
    .arg(limitArg) andThen batchGetPeople
    )

    gql distills what it means to be a GraphQL resolver into a concise, well-behaved algebra that composes.

    Declarative schema definition

tpe[IO, Person](
  "Person",
  "name" -> lift(_.name),
  "friends" -> eff(p => getFriends(p.id))
)

    gql comes with syntax and a DSL for succinctly defining schemas.

    Typed functional graphql

(
  arg[String]("firstName"),
  arg[String]("lastName")
).mapN(_ + " " + _)

    gql adopts a simple and predictable approach to GraphQL. Every aspect has been crafted to minimize friction by employing the proper structures.

    Query planning

    slowFields ::: fastFields.contramap(...)

gql features a query planner heuristic that enables better-than-naive query performance and an expressive batching api that helps the user optimize their schema in a completely typed, functional manner.

    Signal based subscriptions

    "data" -> resolve(_
    .streamMap(subscribeToIds)
    .andThen(batchGetData)
    .streamMap(subscribeToSubIds)
    )

    gql features an unusual subscription model that is instead based on signals. That is, streams or even resources of data can appear anywhere in the schema and gql will efficiently re-execute the query and handle resource leasing.

    Easy to extend

    gql.http4s.Http4sRoutes.ws(queryCompiler, _)

    gql is designed to be easily extended with new features. Want to provide a custom query planner or calculate cost estimates yourself? No problem.

    gql also comes with some pre-built extensions such as http4s integration, graphql-ws, tracing and global object identification.

    Client-side dsl

    fragment("PersonFragment", "Person") {
    (
    sel[String]("name"),
    sel[Option[Int]]("age")
    ).mapN(Person.apply)
    }

    gql also features a client which can either be declared via the dsl or code generated from a graphql query.

    gql is modular and as such, client queries can be validated against the same implementation rules as a gql server.

diff --git a/markdown-page/index.html b/markdown-page/index.html
index 258d922c..ad557361 100644
--- a/markdown-page/index.html
+++ b/markdown-page/index.html

    Markdown page example

    You don't need React to write simple standalone pages.
