From 648c018f921e5f27fb540fa2e8f3d71e0875e3fa Mon Sep 17 00:00:00 2001 From: Kiryl Mialeshka <8974488+meskill@users.noreply.github.com> Date: Mon, 26 Aug 2024 16:10:21 +0200 Subject: [PATCH 1/8] refactor: store __typename in value instead of separate entity (#2626) --- src/core/blueprint/into_schema.rs | 39 ++-- src/core/ir/discriminator.rs | 179 +++++++++--------- src/core/ir/eval.rs | 12 +- src/core/ir/eval_context.rs | 12 -- src/core/jit/builder.rs | 22 ++- src/core/jit/common/jp.rs | 6 +- src/core/jit/error.rs | 2 - src/core/jit/exec.rs | 69 ++----- src/core/jit/exec_const.rs | 9 +- src/core/jit/response.rs | 4 +- ...se__test__conversion_to_async_graphql.snap | 2 +- src/core/jit/synth/synth.rs | 67 ++----- src/core/json/borrow.rs | 21 ++ src/core/json/graphql.rs | 23 ++- src/core/json/json_like.rs | 3 + src/core/json/json_like_list.rs | 28 +++ src/core/json/mod.rs | 2 + src/core/json/serde.rs | 20 ++ tests/core/snapshots/test-union.md_5.snap | 45 +++++ tests/execution/test-union.md | 28 +++ 20 files changed, 346 insertions(+), 247 deletions(-) create mode 100644 src/core/json/json_like_list.rs create mode 100644 tests/core/snapshots/test-union.md_5.snap diff --git a/src/core/blueprint/into_schema.rs b/src/core/blueprint/into_schema.rs index cbfdb681f3..31fe06a964 100644 --- a/src/core/blueprint/into_schema.rs +++ b/src/core/blueprint/into_schema.rs @@ -1,7 +1,6 @@ use std::borrow::Cow; use std::sync::Arc; -use anyhow::{bail, Result}; use async_graphql::dynamic::{self, FieldFuture, FieldValue, SchemaBuilder}; use async_graphql::ErrorExtensions; use async_graphql_value::ConstValue; @@ -11,7 +10,7 @@ use tracing::Instrument; use crate::core::blueprint::{Blueprint, Definition, Type}; use crate::core::http::RequestContext; -use crate::core::ir::{EvalContext, ResolverContext, TypeName}; +use crate::core::ir::{EvalContext, ResolverContext, TypedValue}; use crate::core::scalar; fn to_type_ref(type_of: &Type) -> dynamic::TypeRef { @@ -59,27 +58,21 @@ fn set_default_value( } } -fn to_field_value<'a>( - ctx: &mut EvalContext<'a, ResolverContext<'a>>, - value: async_graphql::Value, -) -> Result> { - let type_name = ctx.type_name.take(); - - Ok(match (value, type_name) { - // NOTE: Mostly type_name is going to be None so we should keep that as the first check. 
- (value, None) => FieldValue::from(value), - (ConstValue::List(values), Some(TypeName::Vec(names))) => FieldValue::list( - values - .into_iter() - .zip(names) - .map(|(value, type_name)| FieldValue::from(value).with_type(type_name)), - ), - (value @ ConstValue::Object(_), Some(TypeName::Single(type_name))) => { - FieldValue::from(value).with_type(type_name) +fn to_field_value(value: async_graphql::Value) -> FieldValue<'static> { + match value { + ConstValue::List(vec) => FieldValue::list(vec.into_iter().map(to_field_value)), + value => { + let type_name = value.get_type_name().map(|s| s.to_string()); + + let field_value = FieldValue::from(value); + + if let Some(type_name) = type_name { + field_value.with_type(type_name) + } else { + field_value + } } - (ConstValue::Null, _) => FieldValue::NULL, - (_, Some(_)) => bail!("Failed to match type_name"), - }) + } } fn to_type(def: &Definition) -> dynamic::Type { @@ -131,7 +124,7 @@ fn to_type(def: &Definition) -> dynamic::Type { if let ConstValue::Null = value { Ok(FieldValue::NONE) } else { - Ok(Some(to_field_value(ctx, value)?)) + Ok(Some(to_field_value(value))) } } .instrument(span) diff --git a/src/core/ir/discriminator.rs b/src/core/ir/discriminator.rs index b8907c70c8..bfd7511392 100644 --- a/src/core/ir/discriminator.rs +++ b/src/core/ir/discriminator.rs @@ -8,17 +8,40 @@ use indenter::indented; use indexmap::IndexMap; use crate::core::config::Type; +use crate::core::json::{JsonLike, JsonObjectLike}; use crate::core::valid::{Cause, Valid, Validator}; -/// Represents the type name for the resolved value. -/// It is used when the GraphQL executor needs to resolve values of a union -/// type. In order to select the correct fields, the executor must know the -/// exact type name for each resolved value. When the output is a list of a -/// union type, it should resolve the exact type for every entry in the list. -#[derive(PartialEq, Eq, Debug, Clone)] -pub enum TypeName { - Single(String), - Vec(Vec), +pub trait TypedValue<'a> { + type Error; + + fn get_type_name(&'a self) -> Option<&'a str>; + fn set_type_name(&'a mut self, type_name: String) -> Result<(), Self::Error>; +} + +const TYPENAME_FIELD: &str = "__typename"; + +impl<'json, T> TypedValue<'json> for T +where + T: JsonLike<'json>, + T::JsonObject<'json>: JsonObjectLike<'json, Value = T>, +{ + type Error = anyhow::Error; + + fn get_type_name(&'json self) -> Option<&'json str> { + self.as_object() + .and_then(|obj| obj.get_key(TYPENAME_FIELD)) + .and_then(|val| val.as_str()) + } + + fn set_type_name(&'json mut self, type_name: String) -> Result<(), Self::Error> { + if let Some(obj) = self.as_object_mut() { + obj.insert_key(TYPENAME_FIELD, T::string(type_name.into())); + + Ok(()) + } else { + bail!("Expected object") + } + } } /// Resolver for type member of a union. 
@@ -214,22 +237,7 @@ impl Discriminator { Valid::succeed(discriminator) } - pub fn resolve_type(&self, value: &Value) -> Result { - if let Value::List(list) = value { - let results: Result> = list - .iter() - .map(|item| Ok(self.resolve_type_for_single(item)?.to_string())) - .collect(); - - Ok(TypeName::Vec(results?)) - } else { - Ok(TypeName::Single( - self.resolve_type_for_single(value)?.to_string(), - )) - } - } - - fn resolve_type_for_single(&self, value: &Value) -> Result<&str> { + pub fn resolve_type(&self, value: &Value) -> Result<&str> { let Value::Object(obj) = value else { bail!("Value expected to be object"); }; @@ -346,7 +354,6 @@ mod tests { use super::Discriminator; use crate::core::config::{Field, Type}; - use crate::core::ir::discriminator::TypeName; use crate::core::valid::Validator; #[test] @@ -361,14 +368,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); // ambiguous cases @@ -376,21 +383,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); } @@ -408,14 +415,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); // ambiguous cases @@ -423,21 +430,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); } @@ -466,21 +473,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "a": 1, "ab": 1, "abab": 1 })).unwrap()) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "b": 1, "ab": 1, "abab": 1 })).unwrap()) .unwrap(), - TypeName::Single("B".to_string()) + "B" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "c": 1, "ac": 1 })).unwrap()) .unwrap(), - TypeName::Single("C".to_string()) + "C" ); // ambiguous cases @@ -488,21 +495,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "a": 1, "b": 1, "c": 1 })).unwrap()) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("C".to_string()) + "C" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("C".to_string()) + "C" ); } @@ -528,14 +535,14 @@ mod tests { 
&Value::from_json(json!({ "a": 123, "b": true, "foo": "test" })).unwrap() ) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); // ambiguous cases @@ -543,21 +550,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); // ambiguous cases @@ -565,21 +572,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); } @@ -599,14 +606,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "b": 123, "foo": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Bar".to_string()) + "Bar" ); assert_eq!( @@ -615,7 +622,7 @@ mod tests { &Value::from_json(json!({ "unknown": { "foo": "bar" }, "a": 1 })).unwrap() ) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); // ambiguous cases @@ -623,21 +630,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "foo": "test", "bar": "test" })).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Foo".to_string()) + "Foo" ); } @@ -667,21 +674,21 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "a": 1 })).unwrap()) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "b": 1, "aa": 1 })).unwrap()) .unwrap(), - TypeName::Single("B".to_string()) + "B" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "c": 1, "aaa": 1 })).unwrap()) .unwrap(), - TypeName::Single("C".to_string()) + "C" ); // ambiguous cases @@ -691,21 +698,21 @@ mod tests { &Value::from_json(json!({ "shared": 1, "a": 1, "b": 1, "c": 1 })).unwrap() ) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("A".to_string()) + "A" ); } @@ -766,14 +773,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({ "usual": 1 })).unwrap()) .unwrap(), - TypeName::Single("Var_Var".to_string()) + "Var_Var" ); assert_eq!( discriminator 
.resolve_type(&Value::from_json(json!({ "usual": 1, "payload": 1 })).unwrap()) .unwrap(), - TypeName::Single("Var0_Var".to_string()) + "Var0_Var" ); assert_eq!( @@ -782,14 +789,14 @@ mod tests { &Value::from_json(json!({ "usual": 1, "command": 2, "useless": 1 })).unwrap() ) .unwrap(), - TypeName::Single("Var1_Var".to_string()) + "Var1_Var" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "usual": 1, "flag": true })).unwrap()) .unwrap(), - TypeName::Single("Var_Var0".to_string()) + "Var_Var0" ); assert_eq!( @@ -799,7 +806,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("Var_Var1".to_string()) + "Var_Var1" ); assert_eq!( @@ -808,7 +815,7 @@ mod tests { &Value::from_json(json!({ "usual": 1, "payload": 1, "flag": true })).unwrap() ) .unwrap(), - TypeName::Single("Var0_Var0".to_string()) + "Var0_Var0" ); assert_eq!( @@ -818,7 +825,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("Var0_Var1".to_string()) + "Var0_Var1" ); assert_eq!( @@ -827,7 +834,7 @@ mod tests { &Value::from_json(json!({ "usual": 1, "command": 1, "flag": true })).unwrap() ) .unwrap(), - TypeName::Single("Var1_Var0".to_string()) + "Var1_Var0" ); assert_eq!( @@ -837,7 +844,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("Var1_Var1".to_string()) + "Var1_Var1" ); // ambiguous cases @@ -855,14 +862,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("Var_Var".to_string()) + "Var_Var" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("Var_Var".to_string()) + "Var_Var" ); } @@ -901,14 +908,14 @@ mod tests { &Value::from_json(json!({ "uniqueA1": "value", "common": 1 })).unwrap() ) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "uniqueB1": true, "common": 2 })).unwrap()) .unwrap(), - TypeName::Single("TypeB".to_string()) + "TypeB" ); assert_eq!( @@ -918,7 +925,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("TypeC".to_string()) + "TypeC" ); assert_eq!( @@ -930,7 +937,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("TypeD".to_string()) + "TypeD" ); // ambiguous cases @@ -943,21 +950,21 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); } @@ -996,7 +1003,7 @@ mod tests { &Value::from_json(json!({ "field1": "value", "field2": "value" })).unwrap() ) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); assert_eq!( @@ -1005,7 +1012,7 @@ mod tests { &Value::from_json(json!({ "field2": "value", "field3": "value" })).unwrap() ) .unwrap(), - TypeName::Single("TypeB".to_string()) + "TypeB" ); assert_eq!( @@ -1014,7 +1021,7 @@ mod tests { &Value::from_json(json!({ "field1": "value", "field3": "value" })).unwrap() ) .unwrap(), - TypeName::Single("TypeC".to_string()) + "TypeC" ); assert_eq!( @@ -1026,7 +1033,7 @@ mod tests { .unwrap() ) .unwrap(), - TypeName::Single("TypeD".to_string()) + "TypeD" ); // ambiguous cases @@ -1047,14 +1054,14 @@ mod tests { discriminator .resolve_type(&Value::from_json(json!({})).unwrap()) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); 
assert_eq!( discriminator .resolve_type(&Value::from_json(json!({ "unknown": { "foo": "bar" }})).unwrap()) .unwrap(), - TypeName::Single("TypeA".to_string()) + "TypeA" ); } diff --git a/src/core/ir/eval.rs b/src/core/ir/eval.rs index 595cb57dbd..6bbf8871bd 100644 --- a/src/core/ir/eval.rs +++ b/src/core/ir/eval.rs @@ -5,8 +5,8 @@ use async_graphql_value::ConstValue; use super::eval_io::eval_io; use super::model::{Cache, CacheKey, Map, IR}; -use super::{Error, EvalContext, ResolverContextLike}; -use crate::core::json::JsonLike; +use super::{Error, EvalContext, ResolverContextLike, TypedValue}; +use crate::core::json::{JsonLike, JsonLikeList}; use crate::core::serde_value_ext::ValueExt; // Fake trait to capture proper lifetimes. @@ -89,9 +89,13 @@ impl IR { second.eval(ctx).await } IR::Discriminate(discriminator, expr) => expr.eval(ctx).await.and_then(|value| { - let type_name = discriminator.resolve_type(&value)?; + let value = value.map(|mut value| { + let type_name = discriminator.resolve_type(&value)?; - ctx.set_type_name(type_name); + value.set_type_name(type_name.to_string())?; + + anyhow::Ok(value) + })?; Ok(value) }), diff --git a/src/core/ir/eval_context.rs b/src/core/ir/eval_context.rs index a1afbcf26e..597335c00e 100644 --- a/src/core/ir/eval_context.rs +++ b/src/core/ir/eval_context.rs @@ -5,7 +5,6 @@ use std::sync::Arc; use async_graphql::{ServerError, Value}; use reqwest::header::HeaderMap; -use super::discriminator::TypeName; use super::{GraphQLOperationContext, RelatedFields, ResolverContextLike, SelectionField}; use crate::core::document::print_directives; use crate::core::http::RequestContext; @@ -25,12 +24,6 @@ pub struct EvalContext<'a, Ctx: ResolverContextLike> { // Overridden Arguments for Async GraphQL Context graphql_ctx_args: Option>, - - /// Type name of resolved data that is calculated - /// dynamically based on the shape of the value itself. - /// Required for proper Union type resolutions. - /// More details at [TypeName] - pub type_name: Option, } impl<'a, Ctx: ResolverContextLike> EvalContext<'a, Ctx> { @@ -56,7 +49,6 @@ impl<'a, Ctx: ResolverContextLike> EvalContext<'a, Ctx> { graphql_ctx, graphql_ctx_value: None, graphql_ctx_args: None, - type_name: None, } } @@ -114,10 +106,6 @@ impl<'a, Ctx: ResolverContextLike> EvalContext<'a, Ctx> { pub fn add_error(&self, error: ServerError) { self.graphql_ctx.add_error(error) } - - pub fn set_type_name(&mut self, type_name: TypeName) { - self.type_name = Some(type_name); - } } impl<'a, Ctx: ResolverContextLike> GraphQLOperationContext for EvalContext<'a, Ctx> { diff --git a/src/core/jit/builder.rs b/src/core/jit/builder.rs index 69698d1456..0d9a0c88ea 100644 --- a/src/core/jit/builder.rs +++ b/src/core/jit/builder.rs @@ -231,8 +231,26 @@ impl Builder { fields.push(flat_field); fields = fields.merge_right(child_fields); - } else { - // TODO: error if the field is not found in the schema + } else if field_name == "__typename" { + let flat_field = Field { + id: FieldId::new(self.field_id.next()), + name: field_name.to_string(), + output_name: field_name.to_string(), + ir: None, + type_of: crate::core::blueprint::Type::NamedType { + name: "String".to_owned(), + non_null: true, + }, + type_condition: type_condition.to_string(), + skip, + include, + args: Vec::new(), + pos: selection.pos.into(), + extensions: exts.clone(), + directives, + }; + + fields.push(flat_field); } } Selection::FragmentSpread(Positioned { node: fragment_spread, .. 
}) => { diff --git a/src/core/jit/common/jp.rs b/src/core/jit/common/jp.rs index de31dc9924..521ecdfc8c 100644 --- a/src/core/jit/common/jp.rs +++ b/src/core/jit/common/jp.rs @@ -5,7 +5,6 @@ use serde::Deserialize; use crate::core::blueprint::Blueprint; use crate::core::config::{Config, ConfigModule}; use crate::core::jit::builder::Builder; -use crate::core::jit::exec::TypedValue; use crate::core::jit::store::{Data, Store}; use crate::core::jit::synth::Synth; use crate::core::jit::{self, OperationPlan, Positioned, Variables}; @@ -26,7 +25,7 @@ struct TestData { users: Vec, } -type Entry = Data, Positioned>>; +type Entry = Data>>; struct ProcessedTestData { posts: Value, @@ -75,7 +74,6 @@ impl<'a, Value: JsonLike<'a> + Deserialize<'a> + Clone + 'a> TestData { Value::null() } }) - .map(TypedValue::new) .map(Ok) .map(Data::Single) .enumerate() @@ -130,7 +128,7 @@ impl< .to_owned(); let store = [ - (posts_id, Data::Single(Ok(TypedValue::new(posts)))), + (posts_id, Data::Single(Ok(posts))), (users_id, Data::Multiple(users)), ] .into_iter() diff --git a/src/core/jit/error.rs b/src/core/jit/error.rs index 8689b61ca4..b47380097c 100644 --- a/src/core/jit/error.rs +++ b/src/core/jit/error.rs @@ -28,8 +28,6 @@ pub enum ValidationError { // with async_graphql error message for this case #[error(r#"internal: invalid value for scalar "{type_of}", expected "FieldValue::Value""#)] ScalarInvalid { type_of: String }, - #[error("TypeName shape doesn't satisfy the processed object")] - TypeNameMismatch, #[error(r#"internal: invalid item for enum "{type_of}""#)] EnumInvalid { type_of: String }, #[error("internal: non-null types require a return value")] diff --git a/src/core/jit/exec.rs b/src/core/jit/exec.rs index 26e22c78ad..aa378f08fd 100644 --- a/src/core/jit/exec.rs +++ b/src/core/jit/exec.rs @@ -8,12 +8,12 @@ use futures_util::future::join_all; use super::context::{Context, RequestContext}; use super::{DataPath, OperationPlan, Positioned, Response, Store}; use crate::core::ir::model::IR; -use crate::core::ir::TypeName; +use crate::core::ir::TypedValue; use crate::core::jit; use crate::core::jit::synth::Synth; use crate::core::json::{JsonLike, JsonObjectLike}; -type SharedStore = Arc, Positioned>>>>; +type SharedStore = Arc>>>>; /// /// Default GraphQL executor that takes in a GraphQL Request and produces a @@ -34,7 +34,7 @@ where Self { exec, ctx: RequestContext::new(plan.clone()) } } - pub async fn store(&self) -> Store, Positioned>> { + pub async fn store(&self) -> Store>> { let store = Arc::new(Mutex::new(Store::new())); let mut ctx = ExecutorInner::new(store.clone(), &self.exec, &self.ctx); ctx.init().await; @@ -59,7 +59,7 @@ struct ExecutorInner<'a, Input, Output, Error, Exec> { impl<'a, Input, Output, Error, Exec> ExecutorInner<'a, Input, Output, Error, Exec> where - Output: for<'i> JsonLike<'i> + Debug, + for<'i> Output: JsonLike<'i> + TypedValue<'i> + Debug, Input: Clone + Debug, Exec: IRExecutor, { @@ -85,22 +85,17 @@ where &'b self, ctx: &'b Context<'b, Input, Output>, data_path: &DataPath, - result: TypedValueRef<'b, Output>, + value: &'b Output, ) -> Result<(), Error> { let field = ctx.field(); - let TypedValueRef { value, type_name } = result; // Array // Check if the field expects a list if field.type_of.is_list() { // Check if the value is an array if let Some(array) = value.as_array() { join_all(array.iter().enumerate().map(|(index, value)| { - let type_name = match &type_name { - Some(TypeName::Single(type_name)) => type_name, /* TODO: should throw */ - // ValidationError - 
Some(TypeName::Vec(v)) => &v[index], - None => field.type_of.name(), - }; + let type_name = value.get_type_name().unwrap_or(field.type_of.name()); + join_all(field.nested_iter(type_name).map(|field| { let ctx = ctx.with_value_and_field(value, field); let data_path = data_path.clone().with_index(index); @@ -116,11 +111,7 @@ where // TODO: Validate if the value is an Object // Has to be an Object, we don't do anything while executing if its a Scalar else { - let type_name = match &type_name { - Some(TypeName::Single(type_name)) => type_name, - Some(TypeName::Vec(_)) => panic!("TypeName type mismatch"), /* TODO: should throw ValidationError */ - None => field.type_of.name(), - }; + let type_name = value.get_type_name().unwrap_or(field.type_of.name()); join_all(field.nested_iter(type_name).map(|child| { let ctx = ctx.with_value_and_field(value, child); @@ -143,8 +134,8 @@ where if let Some(ir) = &field.ir { let result = self.ir_exec.execute(ir, ctx).await; - if let Ok(ref result) = result { - self.iter_field(ctx, &data_path, result.as_ref()).await?; + if let Ok(value) = &result { + self.iter_field(ctx, &data_path, value).await?; } let mut store = self.store.lock().unwrap(); @@ -169,49 +160,13 @@ where // here without doing the "fix" .unwrap_or(&default_obj); - let result = TypedValueRef { value, type_name: None }; - - self.iter_field(ctx, &data_path, result).await?; + self.iter_field(ctx, &data_path, value).await?; } Ok(()) } } -#[derive(Clone)] -pub struct TypedValue { - pub value: V, - pub type_name: Option, -} - -pub struct TypedValueRef<'a, V> { - pub value: &'a V, - pub type_name: Option<&'a TypeName>, -} - -impl TypedValue { - pub fn new(value: V) -> Self { - Self { value, type_name: None } - } - - pub fn as_ref(&self) -> TypedValueRef<'_, V> { - TypedValueRef { value: &self.value, type_name: self.type_name.as_ref() } - } -} - -impl<'a, V> TypedValueRef<'a, V> { - pub fn new(value: &'a V) -> Self { - Self { value, type_name: None } - } - - pub fn map<'out, U>(&self, map: impl FnOnce(&V) -> &'out U) -> TypedValueRef<'out, U> - where - 'a: 'out, - { - TypedValueRef { value: map(self.value), type_name: self.type_name } - } -} - /// Executor for IR pub trait IRExecutor { type Input; @@ -221,5 +176,5 @@ pub trait IRExecutor { &'a self, ir: &'a IR, ctx: &'a Context<'a, Self::Input, Self::Output>, - ) -> Result, Self::Error>; + ) -> Result; } diff --git a/src/core/jit/exec_const.rs b/src/core/jit/exec_const.rs index 45f530d367..df6f90f64f 100644 --- a/src/core/jit/exec_const.rs +++ b/src/core/jit/exec_const.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use async_graphql_value::ConstValue; use super::context::Context; -use super::exec::{Executor, IRExecutor, TypedValue}; +use super::exec::{Executor, IRExecutor}; use super::{Error, OperationPlan, Request, Response, Result}; use crate::core::app_context::AppContext; use crate::core::http::RequestContext; @@ -56,13 +56,10 @@ impl<'ctx> IRExecutor for ConstValueExec<'ctx> { &'a self, ir: &'a IR, ctx: &'a Context<'a, Self::Input, Self::Output>, - ) -> Result> { + ) -> Result { let req_context = &self.req_context; let mut eval_ctx = EvalContext::new(req_context, ctx); - Ok(ir - .eval(&mut eval_ctx) - .await - .map(|value| TypedValue { value, type_name: eval_ctx.type_name.take() })?) + Ok(ir.eval(&mut eval_ctx).await?) 
} } diff --git a/src/core/jit/response.rs b/src/core/jit/response.rs index 9d78ca8aea..1b4bf12ec3 100644 --- a/src/core/jit/response.rs +++ b/src/core/jit/response.rs @@ -103,7 +103,9 @@ mod test { Pos { line: 1, column: 2 }, ); let error2 = Positioned::new( - jit::Error::Validation(jit::ValidationError::TypeNameMismatch), + jit::Error::Validation(jit::ValidationError::EnumInvalid { + type_of: "EnumDef".to_string(), + }), Pos { line: 3, column: 4 }, ); diff --git a/src/core/jit/snapshots/tailcall__core__jit__response__test__conversion_to_async_graphql.snap b/src/core/jit/snapshots/tailcall__core__jit__response__test__conversion_to_async_graphql.snap index e3efb724ec..1b362296bd 100644 --- a/src/core/jit/snapshots/tailcall__core__jit__response__test__conversion_to_async_graphql.snap +++ b/src/core/jit/snapshots/tailcall__core__jit__response__test__conversion_to_async_graphql.snap @@ -11,7 +11,7 @@ Response { }, errors: [ ServerError { - message: "TypeName shape doesn't satisfy the processed object", + message: "internal: invalid item for enum \"EnumDef\"", locations: [ Pos(3:4), ], diff --git a/src/core/jit/synth/synth.rs b/src/core/jit/synth/synth.rs index 5c40f41f35..bcc0fb13e0 100644 --- a/src/core/jit/synth/synth.rs +++ b/src/core/jit/synth/synth.rs @@ -1,12 +1,11 @@ -use crate::core::ir::TypeName; -use crate::core::jit::exec::{TypedValue, TypedValueRef}; +use crate::core::ir::TypedValue; use crate::core::jit::model::{Field, Nested, OperationPlan, Variable, Variables}; use crate::core::jit::store::{Data, DataPath, Store}; use crate::core::jit::{Error, PathSegment, Positioned, ValidationError}; use crate::core::json::{JsonLike, JsonObjectLike}; use crate::core::scalar; -type ValueStore = Store, Positioned>>; +type ValueStore = Store>>; pub struct Synth { plan: OperationPlan, @@ -45,7 +44,7 @@ impl Synth { impl<'a, Value> Synth where - Value: JsonLike<'a> + Clone, + Value: JsonLike<'a> + Clone + std::fmt::Debug, Value::JsonObject<'a>: JsonObjectLike<'a, Value = Value>, { #[inline(always)] @@ -79,7 +78,7 @@ where fn iter( &'a self, node: &'a Field, Value>, - result: Option>, + value: Option<&'a Value>, data_path: &DataPath, ) -> Result> { match self.store.get(&node.id) { @@ -97,15 +96,12 @@ where match data { Data::Single(result) => { - let result = match result { - Ok(result) => result, - Err(err) => return Err(err.clone()), - }; + let value = result.as_ref().map_err(Clone::clone)?; - if !Self::is_array(&node.type_of, &result.value) { + if !Self::is_array(&node.type_of, value) { return Ok(Value::null()); } - self.iter_inner(node, result.as_ref(), data_path) + self.iter_inner(node, value, data_path) } _ => { // TODO: should bailout instead of returning Null @@ -113,8 +109,8 @@ where } } } - None => match result { - Some(result) => self.iter_inner(node, result, data_path), + None => match value { + Some(value) => self.iter_inner(node, value, data_path), None => Ok(Value::null()), }, } @@ -124,15 +120,13 @@ where fn iter_inner( &'a self, node: &'a Field, Value>, - result: TypedValueRef<'a, Value>, + value: &'a Value, data_path: &DataPath, ) -> Result> { if !self.include(node) { return Ok(Value::null()); } - let TypedValueRef { type_name, value } = result; - let eval_result = if value.is_null() { if node.type_of.is_nullable() { Ok(Value::null()) @@ -172,20 +166,7 @@ where (_, Some(obj)) => { let mut ans = Value::JsonObject::new(); - let type_name = match &type_name { - Some(TypeName::Single(type_name)) => type_name, - Some(TypeName::Vec(v)) => { - if let Some(index) = 
data_path.as_slice().last() { - &v[*index] - } else { - return Err(Positioned::new( - ValidationError::TypeNameMismatch.into(), - node.pos, - )); - } - } - None => node.type_of.name(), - }; + let type_name = value.get_type_name().unwrap_or(node.type_of.name()); for child in node.nested_iter(type_name) { // all checks for skip must occur in `iter_inner` @@ -193,10 +174,7 @@ where let include = self.include(child); if include { let val = obj.get_key(child.name.as_str()); - ans.insert_key( - &child.output_name, - self.iter(child, val.map(TypedValueRef::new), data_path)?, - ); + ans.insert_key(&child.output_name, self.iter(child, val, data_path)?); } } @@ -205,11 +183,7 @@ where (Some(arr), _) => { let mut ans = vec![]; for (i, val) in arr.iter().enumerate() { - let val = self.iter_inner( - node, - result.map(|_| val), - &data_path.clone().with_index(i), - )?; + let val = self.iter_inner(node, val, &data_path.clone().with_index(i))?; ans.push(val) } Ok(Value::array(ans)) @@ -256,7 +230,6 @@ mod tests { use crate::core::config::{Config, ConfigModule}; use crate::core::jit::builder::Builder; use crate::core::jit::common::JP; - use crate::core::jit::exec::TypedValue; use crate::core::jit::model::{FieldId, Variables}; use crate::core::jit::store::{Data, Store}; use crate::core::jit::synth::Synth; @@ -312,22 +285,20 @@ mod tests { } impl TestData { - fn into_value<'a, Value: Deserialize<'a>>(self) -> Data> { + fn into_value<'a, Value: Deserialize<'a>>(self) -> Data { match self { - Self::Posts => Data::Single(TypedValue::new(serde_json::from_str(POSTS).unwrap())), - Self::User1 => Data::Single(TypedValue::new(serde_json::from_str(USER1).unwrap())), + Self::Posts => Data::Single(serde_json::from_str(POSTS).unwrap()), + Self::User1 => Data::Single(serde_json::from_str(USER1).unwrap()), TestData::UsersData => Data::Multiple( vec![ - Data::Single(TypedValue::new(serde_json::from_str(USER1).unwrap())), - Data::Single(TypedValue::new(serde_json::from_str(USER2).unwrap())), + Data::Single(serde_json::from_str(USER1).unwrap()), + Data::Single(serde_json::from_str(USER2).unwrap()), ] .into_iter() .enumerate() .collect(), ), - TestData::Users => { - Data::Single(TypedValue::new(serde_json::from_str(USERS).unwrap())) - } + TestData::Users => Data::Single(serde_json::from_str(USERS).unwrap()), } } } diff --git a/src/core/json/borrow.rs b/src/core/json/borrow.rs index 7e6001cf2b..59fd376d7c 100644 --- a/src/core/json/borrow.rs +++ b/src/core/json/borrow.rs @@ -47,10 +47,31 @@ impl<'ctx> JsonLike<'ctx> for Value<'ctx> { } } + fn into_array(self) -> Option> { + match self { + Value::Array(array) => Some(array), + _ => None, + } + } + fn as_object(&self) -> Option<&Self::JsonObject<'_>> { self.as_object() } + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject<'ctx>> { + match self { + Value::Object(obj) => Some(obj), + _ => None, + } + } + + fn into_object(self) -> Option> { + match self { + Value::Object(obj) => Some(obj), + _ => None, + } + } + fn as_str(&self) -> Option<&str> { self.as_str() } diff --git a/src/core/json/graphql.rs b/src/core/json/graphql.rs index 0810f70c1e..a41609a80a 100644 --- a/src/core/json/graphql.rs +++ b/src/core/json/graphql.rs @@ -15,7 +15,7 @@ impl<'obj, Value: JsonLike<'obj> + Clone> JsonObjectLike<'obj> for IndexMap Option<&Self::Value> { - self.get(&Name::new(key)) + self.get(key) } fn insert_key(&mut self, key: &'obj str, value: Self::Value) { @@ -33,6 +33,13 @@ impl<'json> JsonLike<'json> for ConstValue { } } + fn into_array(self) -> Option> { + match self { + 
ConstValue::List(seq) => Some(seq), + _ => None, + } + } + fn as_str(&self) -> Option<&str> { match self { ConstValue::String(s) => Some(s), @@ -110,6 +117,20 @@ impl<'json> JsonLike<'json> for ConstValue { } } + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject<'_>> { + match self { + ConstValue::Object(map) => Some(map), + _ => None, + } + } + + fn into_object(self) -> Option> { + match self { + ConstValue::Object(map) => Some(map), + _ => None, + } + } + fn object(obj: Self::JsonObject<'json>) -> Self { ConstValue::Object(obj) } diff --git a/src/core/json/json_like.rs b/src/core/json/json_like.rs index 1b74bea3c8..2b10d39eb0 100644 --- a/src/core/json/json_like.rs +++ b/src/core/json/json_like.rs @@ -31,7 +31,10 @@ pub trait JsonLike<'json>: Sized { // Operators fn as_array(&self) -> Option<&Vec>; + fn into_array(self) -> Option>; fn as_object(&self) -> Option<&Self::JsonObject<'_>>; + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject<'json>>; + fn into_object(self) -> Option>; fn as_str(&self) -> Option<&str>; fn as_i64(&self) -> Option; fn as_u64(&self) -> Option; diff --git a/src/core/json/json_like_list.rs b/src/core/json/json_like_list.rs new file mode 100644 index 0000000000..6743b380a8 --- /dev/null +++ b/src/core/json/json_like_list.rs @@ -0,0 +1,28 @@ +use super::JsonLike; + +pub trait JsonLikeList<'json>: JsonLike<'json> { + fn map(self, mut mapper: impl FnMut(Self) -> Result) -> Result { + if self.as_array().is_some() { + let new = self + .into_array() + .unwrap() + .into_iter() + .map(mapper) + .collect::>()?; + + Ok(Self::array(new)) + } else { + mapper(self) + } + } + + fn try_for_each(&self, mut f: impl FnMut(&Self) -> Result<(), Err>) -> Result<(), Err> { + if let Some(arr) = self.as_array() { + arr.iter().try_for_each(f) + } else { + f(self) + } + } +} + +impl<'json, T: JsonLike<'json>> JsonLikeList<'json> for T {} diff --git a/src/core/json/mod.rs b/src/core/json/mod.rs index 95efa530d0..c1a11d6919 100644 --- a/src/core/json/mod.rs +++ b/src/core/json/mod.rs @@ -1,12 +1,14 @@ mod borrow; mod graphql; mod json_like; +mod json_like_list; mod json_schema; mod serde; use std::collections::HashMap; pub use json_like::*; +pub use json_like_list::*; pub use json_schema::*; // Highly micro-optimized and benchmarked version of get_path_all diff --git a/src/core/json/serde.rs b/src/core/json/serde.rs index 0064e7b557..8ee36499c8 100644 --- a/src/core/json/serde.rs +++ b/src/core/json/serde.rs @@ -26,6 +26,14 @@ impl<'json> JsonLike<'json> for serde_json::Value { self.as_array() } + fn into_array(self) -> Option> { + if let Self::Array(vec) = self { + Some(vec) + } else { + None + } + } + fn as_str(&self) -> Option<&str> { self.as_str() } @@ -85,6 +93,18 @@ impl<'json> JsonLike<'json> for serde_json::Value { self.as_object() } + fn as_object_mut(&mut self) -> Option<&mut Self::JsonObject<'_>> { + self.as_object_mut() + } + + fn into_object(self) -> Option> { + if let Self::Object(obj) = self { + Some(obj) + } else { + None + } + } + fn object(obj: Self::JsonObject<'json>) -> Self { serde_json::Value::Object(obj) } diff --git a/tests/core/snapshots/test-union.md_5.snap b/tests/core/snapshots/test-union.md_5.snap new file mode 100644 index 0000000000..442efd36fc --- /dev/null +++ b/tests/core/snapshots/test-union.md_5.snap @@ -0,0 +1,45 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "foo": { + "__typename": "Foo" + }, + "bar": { + "__typename": 
"Bar" + }, + "arr": [ + { + "__typename": "Foo" + }, + { + "__typename": "Bar" + }, + { + "__typename": "Foo" + }, + { + "__typename": "Foo" + }, + { + "__typename": "Bar" + } + ], + "nested": { + "foo": { + "__typename": "Foo" + }, + "bar": { + "__typename": "Bar" + } + } + } + } +} diff --git a/tests/execution/test-union.md b/tests/execution/test-union.md index 8d7eea5383..907920a1ee 100644 --- a/tests/execution/test-union.md +++ b/tests/execution/test-union.md @@ -37,6 +37,7 @@ type Query { - request: method: GET url: http://jsonplaceholder.typicode.com/foo + expectedHits: 2 response: status: 200 body: @@ -45,6 +46,7 @@ type Query { - request: method: GET url: http://jsonplaceholder.typicode.com/bar + expectedHits: 2 response: status: 200 body: @@ -53,6 +55,7 @@ type Query { - request: method: GET url: http://jsonplaceholder.typicode.com/nested + expectedHits: 2 response: status: 200 body: @@ -64,6 +67,7 @@ type Query { - request: method: GET url: http://jsonplaceholder.typicode.com/arr + expectedHits: 2 response: status: 200 body: @@ -155,4 +159,28 @@ type Query { } } } + +- method: POST + url: http://localhost:8080/graphql + body: + query: > + query { + foo { + __typename + } + bar { + __typename + } + arr { + __typename + } + nested { + foo { + __typename + } + bar { + __typename + } + } + } ``` From f66fc89d4ecda88a5cb257285109d1fdbaf54e6e Mon Sep 17 00:00:00 2001 From: Sahil Yeole <73148455+beelchester@users.noreply.github.com> Date: Mon, 26 Aug 2024 22:13:34 +0530 Subject: [PATCH 2/8] feat(2690): make llm models configurable (#2716) Co-authored-by: Tushar Mathur Co-authored-by: Sandipsinh Dilipsinh Rathod <62684960+ssddOnTop@users.noreply.github.com> Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> Co-authored-by: Mehul Mathur --- src/cli/generator/config.rs | 37 +- src/cli/generator/generator.rs | 26 +- src/cli/llm/infer_type_name.rs | 15 +- src/cli/llm/mod.rs | 1 - src/cli/llm/model.rs | 73 --- src/cli/llm/wizard.rs | 5 +- ..._fixtures__generator__gen_deezer.json.snap | 419 ++++++++++++++++++ ...__generator__gen_jsonplaceholder.json.snap | 81 ++++ 8 files changed, 546 insertions(+), 111 deletions(-) delete mode 100644 src/cli/llm/model.rs create mode 100644 tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_deezer.json.snap create mode 100644 tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_jsonplaceholder.json.snap diff --git a/src/cli/generator/config.rs b/src/cli/generator/config.rs index 08d9fd40da..dab568cb42 100644 --- a/src/cli/generator/config.rs +++ b/src/cli/generator/config.rs @@ -25,8 +25,18 @@ pub struct Config { #[serde(skip_serializing_if = "Option::is_none")] pub preset: Option, pub schema: Schema, - #[serde(skip_serializing_if = "TemplateString::is_empty")] - pub secret: TemplateString, + #[serde(skip_serializing_if = "Option::is_none")] + pub llm: Option, +} + +#[derive(Deserialize, Serialize, Debug, Default, PartialEq, Clone)] +#[serde(rename_all = "camelCase")] +#[serde(deny_unknown_fields)] +pub struct LLMConfig { + #[serde(skip_serializing_if = "Option::is_none")] + pub model: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub secret: Option, } #[derive(Clone, Deserialize, Serialize, Debug, Default)] @@ -273,13 +283,17 @@ impl Config { .collect::>>>()?; let output = self.output.resolve(parent_dir)?; + let llm = self.llm.map(|llm| { + let secret = llm.secret.map(|s| s.resolve(&reader_context)); + LLMConfig { model: llm.model, secret } + 
}); Ok(Config { inputs, output, schema: self.schema, preset: self.preset, - secret: self.secret.resolve(&reader_context), + llm, }) } } @@ -419,7 +433,7 @@ mod tests { fn test_raise_error_unknown_field_at_root_level() { let json = r#"{"input": "value"}"#; let expected_error = - "unknown field `input`, expected one of `inputs`, `output`, `preset`, `schema`, `secret` at line 1 column 8"; + "unknown field `input`, expected one of `inputs`, `output`, `preset`, `schema`, `llm` at line 1 column 8"; assert_deserialization_error(json, expected_error); } @@ -492,7 +506,7 @@ mod tests { } #[test] - fn test_secret() { + fn test_llm_config() { let mut env_vars = HashMap::new(); let token = "eyJhbGciOiJIUzI1NiIsInR5"; env_vars.insert("TAILCALL_SECRET".to_owned(), token.to_owned()); @@ -506,12 +520,17 @@ mod tests { headers: Default::default(), }; - let config = - Config::default().secret(TemplateString::parse("{{.env.TAILCALL_SECRET}}").unwrap()); + let config = Config::default().llm(Some(LLMConfig { + model: Some("gpt-3.5-turbo".to_string()), + secret: Some(TemplateString::parse("{{.env.TAILCALL_SECRET}}").unwrap()), + })); let resolved_config = config.into_resolved("", reader_ctx).unwrap(); - let actual = resolved_config.secret; - let expected = TemplateString::from("eyJhbGciOiJIUzI1NiIsInR5"); + let actual = resolved_config.llm; + let expected = Some(LLMConfig { + model: Some("gpt-3.5-turbo".to_string()), + secret: Some(TemplateString::from(token)), + }); assert_eq!(actual, expected); } diff --git a/src/cli/generator/generator.rs b/src/cli/generator/generator.rs index b54ce98224..f34ce02616 100644 --- a/src/cli/generator/generator.rs +++ b/src/cli/generator/generator.rs @@ -6,7 +6,7 @@ use hyper::HeaderMap; use inquire::Confirm; use pathdiff::diff_paths; -use super::config::{Config, Resolved, Source}; +use super::config::{Config, LLMConfig, Resolved, Source}; use super::source::ConfigSource; use crate::cli::llm::InferTypeName; use crate::core::config::transformer::{Preset, RenameTypes}; @@ -164,7 +164,7 @@ impl Generator { let query_type = config.schema.query.clone(); let mutation_type_name = config.schema.mutation.clone(); - let secret = config.secret.clone(); + let llm = config.llm.clone(); let preset = config.preset.clone().unwrap_or_default(); let preset: Preset = preset.validate_into().to_result()?; let input_samples = self.resolve_io(config).await?; @@ -180,19 +180,15 @@ impl Generator { let mut config = config_gen.mutation(mutation_type_name).generate(true)?; if infer_type_names { - let key = if !secret.is_empty() { - Some(secret.to_string()) - } else { - None - }; - - let mut llm_gen = InferTypeName::new(key); - let suggested_names = llm_gen.generate(config.config()).await?; - let cfg = RenameTypes::new(suggested_names.iter()) - .transform(config.config().to_owned()) - .to_result()?; - - config = ConfigModule::from(cfg); + if let Some(LLMConfig { model: Some(model), secret }) = llm { + let mut llm_gen = InferTypeName::new(model, secret.map(|s| s.to_string())); + let suggested_names = llm_gen.generate(config.config()).await?; + let cfg = RenameTypes::new(suggested_names.iter()) + .transform(config.config().to_owned()) + .to_result()?; + + config = ConfigModule::from(cfg); + } } self.write(&config, &path).await?; diff --git a/src/cli/llm/infer_type_name.rs b/src/cli/llm/infer_type_name.rs index 61ae8bb360..cfba78ff77 100644 --- a/src/cli/llm/infer_type_name.rs +++ b/src/cli/llm/infer_type_name.rs @@ -4,14 +4,12 @@ use genai::chat::{ChatMessage, ChatRequest, ChatResponse}; use 
serde::{Deserialize, Serialize}; use serde_json::json; -use super::model::groq; use super::{Error, Result, Wizard}; use crate::core::config::Config; use crate::core::Mustache; -#[derive(Default)] pub struct InferTypeName { - secret: Option, + wizard: Wizard, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -74,14 +72,11 @@ impl TryInto for Question { } impl InferTypeName { - pub fn new(secret: Option) -> InferTypeName { - Self { secret } + pub fn new(model: String, secret: Option) -> InferTypeName { + Self { wizard: Wizard::new(model, secret) } } - pub async fn generate(&mut self, config: &Config) -> Result> { - let secret = self.secret.as_ref().map(|s| s.to_owned()); - - let wizard: Wizard = Wizard::new(groq::LLAMA38192, secret); + pub async fn generate(&mut self, config: &Config) -> Result> { let mut new_name_mappings: HashMap = HashMap::new(); // removed root type from types. @@ -104,7 +99,7 @@ impl InferTypeName { let mut delay = 3; loop { - let answer = wizard.ask(question.clone()).await; + let answer = self.wizard.ask(question.clone()).await; match answer { Ok(answer) => { let name = &answer.suggestions.join(", "); diff --git a/src/cli/llm/mod.rs b/src/cli/llm/mod.rs index ef63fb9d4a..40c0dce610 100644 --- a/src/cli/llm/mod.rs +++ b/src/cli/llm/mod.rs @@ -3,7 +3,6 @@ pub mod infer_type_name; pub use error::Error; use error::Result; pub use infer_type_name::InferTypeName; -mod model; mod wizard; pub use wizard::Wizard; diff --git a/src/cli/llm/model.rs b/src/cli/llm/model.rs deleted file mode 100644 index a3da95d8eb..0000000000 --- a/src/cli/llm/model.rs +++ /dev/null @@ -1,73 +0,0 @@ -#![allow(unused)] - -use std::borrow::Cow; -use std::fmt::{Display, Formatter}; -use std::marker::PhantomData; - -use derive_setters::Setters; -use genai::adapter::AdapterKind; - -#[derive(Clone)] -pub struct Model(&'static str); - -pub mod open_ai { - use super::*; - pub const GPT3_5_TURBO: Model = Model("gp-3.5-turbo"); - pub const GPT4: Model = Model("gpt-4"); - pub const GPT4_TURBO: Model = Model("gpt-4-turbo"); - pub const GPT4O_MINI: Model = Model("gpt-4o-mini"); - pub const GPT4O: Model = Model("gpt-4o"); -} - -pub mod ollama { - use super::*; - pub const GEMMA2B: Model = Model("gemma:2b"); -} - -pub mod anthropic { - use super::*; - pub const CLAUDE3_HAIKU_20240307: Model = Model("claude-3-haiku-20240307"); - pub const CLAUDE3_SONNET_20240229: Model = Model("claude-3-sonnet-20240229"); - pub const CLAUDE3_OPUS_20240229: Model = Model("claude-3-opus-20240229"); - pub const CLAUDE35_SONNET_20240620: Model = Model("claude-3-5-sonnet-20240620"); -} - -pub mod cohere { - use super::*; - pub const COMMAND_LIGHT_NIGHTLY: Model = Model("command-light-nightly"); - pub const COMMAND_LIGHT: Model = Model("command-light"); - pub const COMMAND_NIGHTLY: Model = Model("command-nightly"); - pub const COMMAND: Model = Model("command"); - pub const COMMAND_R: Model = Model("command-r"); - pub const COMMAND_R_PLUS: Model = Model("command-r-plus"); -} - -pub mod gemini { - use super::*; - pub const GEMINI15_FLASH_LATEST: Model = Model("gemini-1.5-flash-latest"); - pub const GEMINI10_PRO: Model = Model("gemini-1.0-pro"); - pub const GEMINI15_FLASH: Model = Model("gemini-1.5-flash"); - pub const GEMINI15_PRO: Model = Model("gemini-1.5-pro"); -} - -pub mod groq { - use super::*; - pub const LLAMA708192: Model = Model("llama3-70b-8192"); - pub const LLAMA38192: Model = Model("llama3-8b-8192"); - pub const LLAMA_GROQ8B8192_TOOL_USE_PREVIEW: Model = - Model("llama3-groq-8b-8192-tool-use-preview"); - pub const 
LLAMA_GROQ70B8192_TOOL_USE_PREVIEW: Model = - Model("llama3-groq-70b-8192-tool-use-preview"); - pub const GEMMA29B_IT: Model = Model("gemma2-9b-it"); - pub const GEMMA7B_IT: Model = Model("gemma-7b-it"); - pub const MIXTRAL_8X7B32768: Model = Model("mixtral-8x7b-32768"); - pub const LLAMA8B_INSTANT: Model = Model("llama-3.1-8b-instant"); - pub const LLAMA70B_VERSATILE: Model = Model("llama-3.1-70b-versatile"); - pub const LLAMA405B_REASONING: Model = Model("llama-3.1-405b-reasoning"); -} - -impl Model { - pub fn as_str(&self) -> &'static str { - self.0 - } -} diff --git a/src/cli/llm/wizard.rs b/src/cli/llm/wizard.rs index 1604d7f15f..46d7a18624 100644 --- a/src/cli/llm/wizard.rs +++ b/src/cli/llm/wizard.rs @@ -5,18 +5,17 @@ use genai::resolver::AuthResolver; use genai::Client; use super::Result; -use crate::cli::llm::model::Model; #[derive(Setters, Clone)] pub struct Wizard { client: Client, - model: Model, + model: String, _q: std::marker::PhantomData, _a: std::marker::PhantomData, } impl Wizard { - pub fn new(model: Model, secret: Option) -> Self { + pub fn new(model: String, secret: Option) -> Self { let mut config = genai::adapter::AdapterConfig::default(); if let Some(key) = secret { config = config.with_auth_resolver(AuthResolver::from_key_value(key)); diff --git a/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_deezer.json.snap b/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_deezer.json.snap new file mode 100644 index 0000000000..3f1b48ca31 --- /dev/null +++ b/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_deezer.json.snap @@ -0,0 +1,419 @@ +--- +source: tests/cli/gen.rs +expression: config.to_sdl() +--- +schema @server @upstream(baseURL: "https://api.deezer.com") { + query: Query +} + +type Album { + cover: String + cover_big: String + cover_medium: String + cover_small: String + cover_xl: String + id: Int + md5_image: String + title: String + tracklist: String + type: String +} + +type Artist { + id: Int + link: String + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + radio: Boolean + tracklist: String + type: String +} + +type Chart { + albums: T167 + artists: T169 + playlists: T181 + podcasts: Podcast + tracks: T166 +} + +type Contributor { + id: Int + link: String + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + radio: Boolean + role: String + share: String + tracklist: String + type: String +} + +type Datum { + album: Album + artist: T42 + duration: Int + explicit_content_cover: Int + explicit_content_lyrics: Int + explicit_lyrics: Boolean + id: Int + link: String + md5_image: String + preview: String + rank: Int + readable: Boolean + time_add: Int + title: String + title_short: String + title_version: String + type: String +} + +type Editorial { + data: [T185] + total: Int +} + +type Genre { + data: [T5] +} + +type Playlist { + checksum: String + collaborative: Boolean + creation_date: String + creator: User + description: String + duration: Int + fans: Int + id: Int + is_loved_track: Boolean + link: String + md5_image: String + nb_tracks: Int + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_type: String + picture_xl: String + public: Boolean + share: String + title: String + tracklist: String + tracks: Track + type: String +} + +type Podcast { + 
data: [T182] + total: Int +} + +type Query { + album(p1: Int!): T39 @http(path: "/album/{{.args.p1}}") + artist(p1: Int!): T40 @http(path: "/artist/{{.args.p1}}") + chart: Chart @http(path: "/chart") + editorial: Editorial @http(path: "/editorial") + playlist(p1: Int!): Playlist @http(path: "/playlist/{{.args.p1}}") + search(q: String): Search @http(path: "/search", query: [{key: "q", value: "{{.args.q}}"}]) + track(p1: Int!): T4 @http(path: "/track/{{.args.p1}}") + user(p1: Int!): T187 @http(path: "/user/{{.args.p1}}") +} + +type Search { + data: [JSON] + next: String + total: Int +} + +type T165 { + album: Album + artist: Artist + duration: Int + explicit_content_cover: Int + explicit_content_lyrics: Int + explicit_lyrics: Boolean + id: Int + link: String + md5_image: String + position: Int + preview: String + rank: Int + title: String + title_short: String + title_version: String + type: String +} + +type T166 { + data: [T165] + total: Int +} + +type T167 { + data: [JSON] + total: Int +} + +type T168 { + id: Int + link: String + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + position: Int + radio: Boolean + tracklist: String + type: String +} + +type T169 { + data: [T168] + total: Int +} + +type T180 { + checksum: String + creation_date: String + id: Int + link: String + md5_image: String + nb_tracks: Int + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_type: String + picture_xl: String + public: Boolean + title: String + tracklist: String + type: String + user: User +} + +type T181 { + data: [T180] + total: Int +} + +type T182 { + available: Boolean + description: String + fans: Int + id: Int + link: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + share: String + title: String + type: String +} + +type T185 { + id: Int + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + type: String +} + +type T187 { + country: String + id: Int + link: String + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + tracklist: String + type: String +} + +type T2 { + id: Int + link: String + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + radio: Boolean + share: String + tracklist: String + type: String +} + +type T3 { + cover: String + cover_big: String + cover_medium: String + cover_small: String + cover_xl: String + id: Int + link: String + md5_image: String + release_date: String + title: String + tracklist: String + type: String +} + +type T37 { + album: Album + artist: User + duration: Int + explicit_content_cover: Int + explicit_content_lyrics: Int + explicit_lyrics: Boolean + id: Int + link: String + md5_image: String + preview: String + rank: Int + readable: Boolean + title: String + title_short: String + title_version: String + type: String +} + +type T38 { + data: [T37] +} + +type T39 { + artist: T8 + available: Boolean + contributors: [Contributor] + cover: String + cover_big: String + cover_medium: String + cover_small: String + cover_xl: String + duration: Int + explicit_content_cover: Int + explicit_content_lyrics: Int + explicit_lyrics: Boolean + fans: Int + genre_id: Int + genres: Genre + id: Int + label: String + link: String + md5_image: String + nb_tracks: Int + 
record_type: String + release_date: String + share: String + title: String + tracklist: String + tracks: T38 + type: String + upc: String +} + +type T4 { + album: T3 + artist: T2 + available_countries: [String] + bpm: Int + contributors: [Contributor] + disk_number: Int + duration: Int + explicit_content_cover: Int + explicit_content_lyrics: Int + explicit_lyrics: Boolean + gain: Int + id: Int + isrc: String + link: String + md5_image: String + preview: String + rank: Int + readable: Boolean + release_date: String + share: String + title: String + title_short: String + title_version: String + track_position: Int + type: String +} + +type T40 { + id: Int + link: String + name: String + nb_album: Int + nb_fan: Int + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + radio: Boolean + share: String + tracklist: String + type: String +} + +type T42 { + id: Int + link: String + name: String + tracklist: String + type: String +} + +type T5 { + id: Int + name: String + picture: String + type: String +} + +type T8 { + id: Int + name: String + picture: String + picture_big: String + picture_medium: String + picture_small: String + picture_xl: String + tracklist: String + type: String +} + +type Track { + checksum: String + data: [Datum] +} + +type User { + id: Int + name: String + tracklist: String + type: String +} diff --git a/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_jsonplaceholder.json.snap b/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_jsonplaceholder.json.snap new file mode 100644 index 0000000000..47bdfb6296 --- /dev/null +++ b/tests/cli/snapshots/cli_spec__test__generator_spec__tests__cli__fixtures__generator__gen_jsonplaceholder.json.snap @@ -0,0 +1,81 @@ +--- +source: tests/cli/gen.rs +expression: config.to_sdl() +--- +schema @server @upstream(baseURL: "https://jsonplaceholder.typicode.com") { + query: Query +} + +type Address { + city: String + geo: Geo + street: String + suite: String + zipcode: String +} + +type Comment { + body: String + email: String + id: Int + name: String + postId: Int +} + +type Company { + bs: String + catchPhrase: String + name: String +} + +type Geo { + lat: String + lng: String +} + +type Photo { + albumId: Int + id: Int + thumbnailUrl: String + title: String + url: String +} + +type Post { + body: String + id: Int + title: String + userId: Int +} + +type Query { + comment(p1: Int!): Comment @http(path: "/comments/{{.args.p1}}") + comments: [Comment] @http(path: "/comments") + photo(p1: Int!): Photo @http(path: "/photos/{{.args.p1}}") + photos: [Photo] @http(path: "/photos") + post(p1: Int!): Post @http(path: "/posts/{{.args.p1}}") + postComments(postId: Int): [Comment] @http(path: "/comments", query: [{key: "postId", value: "{{.args.postId}}"}]) + posts: [Post] @http(path: "/posts") + todo(p1: Int!): Todo @http(path: "/todos/{{.args.p1}}") + todos: [Todo] @http(path: "/todos") + user(p1: Int!): User @http(path: "/users/{{.args.p1}}") + users: [User] @http(path: "/users") +} + +type Todo { + completed: Boolean + id: Int + title: String + userId: Int +} + +type User { + address: Address + company: Company + email: String + id: Int + name: String + phone: String + username: String + website: String +} From 452648e36cc9931ca6cb25c056547b6a8e6e44da Mon Sep 17 00:00:00 2001 From: Mehul Mathur Date: Mon, 26 Aug 2024 23:52:08 +0530 Subject: [PATCH 3/8] refactor: move cli error to core (#2708) Co-authored-by: Tushar 
Mathur --- src/cli/mod.rs | 2 - src/cli/runtime/file.rs | 7 +- src/cli/server/http_1.rs | 6 +- src/cli/server/http_2.rs | 4 +- src/cli/server/http_server.rs | 4 +- src/cli/tc/check.rs | 4 +- src/cli/telemetry.rs | 6 +- src/{cli/error.rs => core/errata.rs} | 113 ++++++++++++++------------- src/core/grpc/request.rs | 2 +- src/core/http/response.rs | 6 +- src/core/ir/error.rs | 69 +++++++++++----- src/core/ir/eval.rs | 7 +- src/core/ir/eval_http.rs | 2 +- src/core/mod.rs | 2 + src/main.rs | 6 +- 15 files changed, 131 insertions(+), 109 deletions(-) rename src/{cli/error.rs => core/errata.rs} (78%) diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 59798bc6b5..eebb64373a 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -1,5 +1,4 @@ pub mod command; -mod error; mod fmt; pub mod generator; #[cfg(feature = "js")] @@ -11,5 +10,4 @@ pub mod server; mod tc; pub mod telemetry; pub(crate) mod update_checker; -pub use error::CLIError; pub use tc::run::run; diff --git a/src/cli/runtime/file.rs b/src/cli/runtime/file.rs index 3d9be7ed77..72a55160c6 100644 --- a/src/cli/runtime/file.rs +++ b/src/cli/runtime/file.rs @@ -1,7 +1,6 @@ use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use crate::cli::CLIError; -use crate::core::FileIO; +use crate::core::{Errata, FileIO}; #[derive(Clone)] pub struct NativeFileIO {} @@ -29,7 +28,7 @@ async fn write<'a>(path: &'a str, content: &'a [u8]) -> anyhow::Result<()> { impl FileIO for NativeFileIO { async fn write<'a>(&'a self, path: &'a str, content: &'a [u8]) -> anyhow::Result<()> { write(path, content).await.map_err(|err| { - CLIError::new(format!("Failed to write file: {}", path).as_str()) + Errata::new(format!("Failed to write file: {}", path).as_str()) .description(err.to_string()) })?; tracing::info!("File write: {} ... ok", path); @@ -38,7 +37,7 @@ impl FileIO for NativeFileIO { async fn read<'a>(&'a self, path: &'a str) -> anyhow::Result { let content = read(path).await.map_err(|err| { - CLIError::new(format!("Failed to read file: {}", path).as_str()) + Errata::new(format!("Failed to read file: {}", path).as_str()) .description(err.to_string()) })?; tracing::info!("File read: {} ... ok", path); diff --git a/src/cli/server/http_1.rs b/src/cli/server/http_1.rs index 22d7d97c96..76360e860a 100644 --- a/src/cli/server/http_1.rs +++ b/src/cli/server/http_1.rs @@ -4,9 +4,9 @@ use hyper::service::{make_service_fn, service_fn}; use tokio::sync::oneshot; use super::server_config::ServerConfig; -use crate::cli::CLIError; use crate::core::async_graphql_hyper::{GraphQLBatchRequest, GraphQLRequest}; use crate::core::http::handle_request; +use crate::core::Errata; pub async fn start_http_1( sc: Arc, @@ -31,7 +31,7 @@ pub async fn start_http_1( } }); let builder = hyper::Server::try_bind(&addr) - .map_err(CLIError::from)? + .map_err(Errata::from)? .http1_pipeline_flush(sc.app_ctx.blueprint.server.pipeline_flush); super::log_launch(sc.as_ref()); @@ -48,7 +48,7 @@ pub async fn start_http_1( builder.serve(make_svc_single_req).await }; - let result = server.map_err(CLIError::from); + let result = server.map_err(Errata::from); Ok(result?) 
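For orientation, since the hunks in this patch mostly show call sites swapping `CLIError` for `Errata`, here is a minimal, illustrative sketch of the builder-style API the relocated type exposes. The method names (`new`, `description`, `trace`, `caused_by`, `color`) are taken from `src/core/errata.rs` later in this patch; the message strings and the helper function name are placeholders, not output captured from tailcall.

```rust
use tailcall::core::Errata;

// Illustrative only: compose a root error with a description, a config trace,
// and a nested cause, then print it with ANSI colors the way `main.rs` does.
fn report_bind_failure() {
    let err = Errata::new("Server Failed")
        .description("The port is already in use".to_string())
        .trace(vec!["@server".into(), "port".into()])
        .caused_by(vec![Errata::new("os error 48")]);

    tracing::error!("{}", err.color(true));
}
```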
} diff --git a/src/cli/server/http_2.rs b/src/cli/server/http_2.rs index 30ee21b5d1..1895789603 100644 --- a/src/cli/server/http_2.rs +++ b/src/cli/server/http_2.rs @@ -9,9 +9,9 @@ use rustls_pki_types::{CertificateDer, PrivateKeyDer}; use tokio::sync::oneshot; use super::server_config::ServerConfig; -use crate::cli::CLIError; use crate::core::async_graphql_hyper::{GraphQLBatchRequest, GraphQLRequest}; use crate::core::http::handle_request; +use crate::core::Errata; pub async fn start_http_2( sc: Arc, @@ -60,7 +60,7 @@ pub async fn start_http_2( builder.serve(make_svc_single_req).await }; - let result = server.map_err(CLIError::from); + let result = server.map_err(Errata::from); Ok(result?) } diff --git a/src/cli/server/http_server.rs b/src/cli/server/http_server.rs index 62928c492f..3661c9f5f7 100644 --- a/src/cli/server/http_server.rs +++ b/src/cli/server/http_server.rs @@ -8,9 +8,9 @@ use super::http_1::start_http_1; use super::http_2::start_http_2; use super::server_config::ServerConfig; use crate::cli::telemetry::init_opentelemetry; -use crate::cli::CLIError; use crate::core::blueprint::{Blueprint, Http}; use crate::core::config::ConfigModule; +use crate::core::Errata; pub struct Server { config_module: ConfigModule, @@ -32,7 +32,7 @@ impl Server { /// Starts the server in the current Runtime pub async fn start(self) -> Result<()> { - let blueprint = Blueprint::try_from(&self.config_module).map_err(CLIError::from)?; + let blueprint = Blueprint::try_from(&self.config_module).map_err(Errata::from)?; let endpoints = self.config_module.extensions().endpoint_set.clone(); let server_config = Arc::new(ServerConfig::new(blueprint.clone(), endpoints).await?); diff --git a/src/cli/tc/check.rs b/src/cli/tc/check.rs index 9e41cb7a9d..6816836092 100644 --- a/src/cli/tc/check.rs +++ b/src/cli/tc/check.rs @@ -2,11 +2,11 @@ use anyhow::Result; use super::helpers::{display_schema, log_endpoint_set}; use crate::cli::fmt::Fmt; -use crate::cli::CLIError; use crate::core::blueprint::Blueprint; use crate::core::config::reader::ConfigReader; use crate::core::config::Source; use crate::core::runtime::TargetRuntime; +use crate::core::Errata; pub(super) struct CheckParams { pub(super) file_paths: Vec, @@ -24,7 +24,7 @@ pub(super) async fn check_command(params: CheckParams, config_reader: &ConfigRea if let Some(format) = format { Fmt::display(format.encode(&config_module)?); } - let blueprint = Blueprint::try_from(&config_module).map_err(CLIError::from); + let blueprint = Blueprint::try_from(&config_module).map_err(Errata::from); match blueprint { Ok(blueprint) => { diff --git a/src/cli/telemetry.rs b/src/cli/telemetry.rs index 46a64cba6c..50ea364d41 100644 --- a/src/cli/telemetry.rs +++ b/src/cli/telemetry.rs @@ -24,12 +24,12 @@ use tracing_subscriber::layer::SubscriberExt; use tracing_subscriber::{Layer, Registry}; use super::metrics::init_metrics; -use crate::cli::CLIError; use crate::core::blueprint::telemetry::{OtlpExporter, Telemetry, TelemetryExporter}; use crate::core::runtime::TargetRuntime; use crate::core::tracing::{ default_tracing, default_tracing_tailcall, get_log_level, tailcall_filter_target, }; +use crate::core::Errata; static RESOURCE: Lazy = Lazy::new(|| { Resource::default().merge(&Resource::new(vec![ @@ -206,8 +206,8 @@ pub fn init_opentelemetry(config: Telemetry, runtime: &TargetRuntime) -> anyhow: | global::Error::Log(LogError::Other(_)), ) { tracing::subscriber::with_default(default_tracing_tailcall(), || { - let cli = crate::cli::CLIError::new("Open Telemetry Error") - 
.caused_by(vec![CLIError::new(error.to_string().as_str())]) + let cli = crate::core::Errata::new("Open Telemetry Error") + .caused_by(vec![Errata::new(error.to_string().as_str())]) .trace(vec!["schema".to_string(), "@telemetry".to_string()]); tracing::error!("{}", cli.color(true)); }); diff --git a/src/cli/error.rs b/src/core/errata.rs similarity index 78% rename from src/cli/error.rs rename to src/core/errata.rs index 50e06e5301..7cc493ef3b 100644 --- a/src/cli/error.rs +++ b/src/core/errata.rs @@ -2,12 +2,15 @@ use std::fmt::{Debug, Display}; use colored::Colorize; use derive_setters::Setters; -use thiserror::Error; +use crate::core::error::Error as CoreError; use crate::core::valid::ValidationError; -#[derive(Debug, Error, Setters, PartialEq, Clone)] -pub struct CLIError { +/// The moral equivalent of a serde_json::Value but for errors. +/// It's a data structure like Value that can hold any error in an untyped +/// manner. +#[derive(Debug, thiserror::Error, Setters, PartialEq, Clone)] +pub struct Errata { is_root: bool, #[setters(skip)] color: bool, @@ -17,12 +20,12 @@ pub struct CLIError { trace: Vec, #[setters(skip)] - caused_by: Vec, + caused_by: Vec, } -impl CLIError { +impl Errata { pub fn new(message: &str) -> Self { - CLIError { + Errata { is_root: true, color: false, message: message.to_string(), @@ -32,7 +35,7 @@ impl CLIError { } } - pub fn caused_by(mut self, error: Vec) -> Self { + pub fn caused_by(mut self, error: Vec) -> Self { self.caused_by = error; for error in self.caused_by.iter_mut() { @@ -82,7 +85,7 @@ fn bullet(str: &str) -> String { chars.into_iter().collect::() } -impl Display for CLIError { +impl Display for Errata { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let default_padding = 2; @@ -132,46 +135,37 @@ impl Display for CLIError { } } -impl From for CLIError { +impl From for Errata { fn from(error: hyper::Error) -> Self { - // TODO: add type-safety to CLIError conversion - let cli_error = CLIError::new("Server Failed"); + // TODO: add type-safety to Errata conversion + let cli_error = Errata::new("Server Failed"); let message = error.to_string(); if message.to_lowercase().contains("os error 48") { cli_error .description("The port is already in use".to_string()) - .caused_by(vec![CLIError::new(message.as_str())]) + .caused_by(vec![Errata::new(message.as_str())]) } else { cli_error.description(message) } } } -impl From for CLIError { - fn from(error: rustls::Error) -> Self { - let cli_error = CLIError::new("Failed to create TLS Acceptor"); - let message = error.to_string(); - - cli_error.description(message) - } -} - -impl From for CLIError { +impl From for Errata { fn from(error: anyhow::Error) -> Self { - // Convert other errors to CLIError - let cli_error = match error.downcast::() { + // Convert other errors to Errata + let cli_error = match error.downcast::() { Ok(cli_error) => cli_error, Err(error) => { - // Convert other errors to CLIError + // Convert other errors to Errata let cli_error = match error.downcast::>() { - Ok(validation_error) => CLIError::from(validation_error), + Ok(validation_error) => Errata::from(validation_error), Err(error) => { let sources = error .source() - .map(|error| vec![CLIError::new(error.to_string().as_str())]) + .map(|error| vec![Errata::new(error.to_string().as_str())]) .unwrap_or_default(); - CLIError::new(&error.to_string()).caused_by(sources) + Errata::new(&error.to_string()).caused_by(sources) } }; cli_error @@ -181,24 +175,32 @@ impl From for CLIError { } } -impl From for CLIError { +impl 
From for Errata { fn from(error: std::io::Error) -> Self { - let cli_error = CLIError::new("IO Error"); + let cli_error = Errata::new("IO Error"); let message = error.to_string(); cli_error.description(message) } } -impl<'a> From> for CLIError { +impl From for Errata { + fn from(error: CoreError) -> Self { + let cli_error = Errata::new("Core Error"); + let message = error.to_string(); + + cli_error.description(message) + } +} + +impl<'a> From> for Errata { fn from(error: ValidationError<&'a str>) -> Self { - CLIError::new("Invalid Configuration").caused_by( + Errata::new("Invalid Configuration").caused_by( error .as_vec() .iter() .map(|cause| { - let mut err = - CLIError::new(cause.message).trace(Vec::from(cause.trace.clone())); + let mut err = Errata::new(cause.message).trace(Vec::from(cause.trace.clone())); if let Some(description) = cause.description { err = err.description(description.to_owned()); } @@ -209,29 +211,28 @@ impl<'a> From> for CLIError { } } -impl From> for CLIError { +impl From> for Errata { fn from(error: ValidationError) -> Self { - CLIError::new("Invalid Configuration").caused_by( + Errata::new("Invalid Configuration").caused_by( error .as_vec() .iter() .map(|cause| { - CLIError::new(cause.message.as_str()).trace(Vec::from(cause.trace.clone())) + Errata::new(cause.message.as_str()).trace(Vec::from(cause.trace.clone())) }) .collect(), ) } } -impl From> for CLIError { +impl From> for Errata { fn from(value: Box) -> Self { - CLIError::new(value.to_string().as_str()) + Errata::new(value.to_string().as_str()) } } #[cfg(test)] mod tests { - use pretty_assertions::assert_eq; use stripmargin::StripMargin; @@ -275,14 +276,14 @@ mod tests { #[test] fn test_title() { - let error = CLIError::new("Server could not be started"); + let error = Errata::new("Server could not be started"); let expected = r"Server could not be started".strip_margin(); assert_eq!(error.to_string(), expected); } #[test] fn test_title_description() { - let error = CLIError::new("Server could not be started") + let error = Errata::new("Server could not be started") .description("The port is already in use".to_string()); let expected = r"|Server could not be started: The port is already in use".strip_margin(); @@ -291,7 +292,7 @@ mod tests { #[test] fn test_title_description_trace() { - let error = CLIError::new("Server could not be started") + let error = Errata::new("Server could not be started") .description("The port is already in use".to_string()) .trace(vec!["@server".into(), "port".into()]); @@ -304,7 +305,7 @@ mod tests { #[test] fn test_title_trace_caused_by() { - let error = CLIError::new("Configuration Error").caused_by(vec![CLIError::new( + let error = Errata::new("Configuration Error").caused_by(vec![Errata::new( "Base URL needs to be specified", ) .trace(vec![ @@ -324,20 +325,20 @@ mod tests { #[test] fn test_title_trace_multiple_caused_by() { - let error = CLIError::new("Configuration Error").caused_by(vec![ - CLIError::new("Base URL needs to be specified").trace(vec![ + let error = Errata::new("Configuration Error").caused_by(vec![ + Errata::new("Base URL needs to be specified").trace(vec![ "User".into(), "posts".into(), "@http".into(), "baseURL".into(), ]), - CLIError::new("Base URL needs to be specified").trace(vec![ + Errata::new("Base URL needs to be specified").trace(vec![ "Post".into(), "users".into(), "@http".into(), "baseURL".into(), ]), - CLIError::new("Base URL needs to be specified") + Errata::new("Base URL needs to be specified") .description("Set `baseURL` in @http or @server 
directives".into()) .trace(vec![ "Query".into(), @@ -345,7 +346,7 @@ mod tests { "@http".into(), "baseURL".into(), ]), - CLIError::new("Base URL needs to be specified").trace(vec![ + Errata::new("Base URL needs to be specified").trace(vec![ "Query".into(), "posts".into(), "@http".into(), @@ -370,7 +371,7 @@ mod tests { .description("Set `baseURL` in @http or @server directives") .trace(vec!["Query", "users", "@http", "baseURL"]); let valid = ValidationError::from(cause); - let error = CLIError::from(valid); + let error = Errata::from(valid); let expected = r"|Invalid Configuration |Caused by: | • Base URL needs to be specified: Set `baseURL` in @http or @server directives [at Query.users.@http.baseURL]" @@ -381,12 +382,12 @@ mod tests { #[test] fn test_cli_error_identity() { - let cli_error = CLIError::new("Server could not be started") + let cli_error = Errata::new("Server could not be started") .description("The port is already in use".to_string()) .trace(vec!["@server".into(), "port".into()]); let anyhow_error: anyhow::Error = cli_error.clone().into(); - let actual = CLIError::from(anyhow_error); + let actual = Errata::from(anyhow_error); let expected = cli_error; assert_eq!(actual, expected); @@ -399,8 +400,8 @@ mod tests { ); let anyhow_error: anyhow::Error = validation_error.clone().into(); - let actual = CLIError::from(anyhow_error); - let expected = CLIError::from(validation_error); + let actual = Errata::from(anyhow_error); + let expected = Errata::from(validation_error); assert_eq!(actual, expected); } @@ -409,8 +410,8 @@ mod tests { fn test_generic_error() { let anyhow_error = anyhow::anyhow!("Some error msg"); - let actual: CLIError = CLIError::from(anyhow_error); - let expected = CLIError::new("Some error msg"); + let actual: Errata = Errata::from(anyhow_error); + let expected = Errata::new("Some error msg"); assert_eq!(actual, expected); } diff --git a/src/core/grpc/request.rs b/src/core/grpc/request.rs index c7e28b53e3..7bea4ccd68 100644 --- a/src/core/grpc/request.rs +++ b/src/core/grpc/request.rs @@ -160,7 +160,7 @@ mod tests { if let Err(err) = result { match err.downcast_ref::() { - Some(Error::GRPCError { + Some(Error::GRPC { grpc_code, grpc_description, grpc_status_message, diff --git a/src/core/http/response.rs b/src/core/http/response.rs index 2bb28e2b93..710ab0ac1a 100644 --- a/src/core/http/response.rs +++ b/src/core/http/response.rs @@ -102,9 +102,7 @@ impl Response { pub fn to_grpc_error(&self, operation: &ProtobufOperation) -> anyhow::Error { let grpc_status = match Status::from_header_map(&self.headers) { Some(status) => status, - None => { - return Error::IOException("Error while parsing upstream headers".to_owned()).into() - } + None => return Error::IO("Error while parsing upstream headers".to_owned()).into(), }; let mut obj: IndexMap = IndexMap::new(); @@ -136,7 +134,7 @@ impl Response { } obj.insert(Name::new("details"), ConstValue::List(status_details)); - let error = Error::GRPCError { + let error = Error::GRPC { grpc_code: grpc_status.code() as i32, grpc_description: grpc_status.code().description().to_owned(), grpc_status_message: grpc_status.message().to_owned(), diff --git a/src/core/ir/error.rs b/src/core/ir/error.rs index e08523bb71..2bcd513f83 100644 --- a/src/core/ir/error.rs +++ b/src/core/ir/error.rs @@ -1,48 +1,75 @@ +use std::fmt::Display; use std::sync::Arc; use async_graphql::{ErrorExtensions, Value as ConstValue}; use derive_more::From; use thiserror::Error; -use crate::core::{auth, cache, worker}; +use crate::core::{auth, cache, worker, 
Errata}; #[derive(From, Debug, Error, Clone)] pub enum Error { - #[error("IOException: {0}")] - IOException(String), + IO(String), - #[error("gRPC Error: status: {grpc_code}, description: `{grpc_description}`, message: `{grpc_status_message}`")] - GRPCError { + GRPC { grpc_code: i32, grpc_description: String, grpc_status_message: String, grpc_status_details: ConstValue, }, - #[error("APIValidationError: {0:?}")] - APIValidationError(Vec), + APIValidation(Vec), - #[error("ExprEvalError: {0}")] #[from(ignore)] - ExprEvalError(String), + ExprEval(String), - #[error("DeserializeError: {0}")] #[from(ignore)] - DeserializeError(String), + Deserialize(String), - #[error("Authentication Failure: {0}")] - AuthError(auth::error::Error), + Auth(auth::error::Error), - #[error("Worker Error: {0}")] - WorkerError(worker::Error), + Worker(worker::Error), - #[error("Cache Error: {0}")] - CacheError(cache::Error), + Cache(cache::Error), +} + +impl Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + Errata::from(self.to_owned()).fmt(f) + } +} + +impl From for Errata { + fn from(value: Error) -> Self { + match value { + Error::IO(message) => Errata::new("IOException").description(message), + Error::GRPC { + grpc_code, + grpc_description, + grpc_status_message, + grpc_status_details: _, + } => Errata::new("gRPC Error") + .description(format!("status: {grpc_code}, description: `{grpc_description}`, message: `{grpc_status_message}`")), + Error::APIValidation(errors) => Errata::new("API Validation Error") + .caused_by(errors.iter().map(|e| Errata::new(e)).collect::>()), + Error::Deserialize(message) => { + Errata::new("Deserialization Error").description(message) + } + Error::ExprEval(message) => { + Errata::new("Expression Evaluation Error").description(message) + } + Error::Auth(err) => { + Errata::new("Authentication Failure").description(err.to_string()) + } + Error::Worker(err) => Errata::new("Worker Error").description(err.to_string()), + Error::Cache(err) => Errata::new("Cache Error").description(err.to_string()), + } + } } impl ErrorExtensions for Error { fn extend(&self) -> async_graphql::Error { async_graphql::Error::new(format!("{}", self)).extend_with(|_err, e| { - if let Error::GRPCError { + if let Error::GRPC { grpc_code, grpc_description, grpc_status_message, @@ -60,7 +87,7 @@ impl ErrorExtensions for Error { impl<'a> From> for Error { fn from(value: crate::core::valid::ValidationError<&'a str>) -> Self { - Error::APIValidationError( + Error::APIValidation( value .as_vec() .iter() @@ -74,7 +101,7 @@ impl From> for Error { fn from(error: Arc) -> Self { match error.downcast_ref::() { Some(err) => err.clone(), - None => Error::IOException(error.to_string()), + None => Error::IO(error.to_string()), } } } @@ -86,7 +113,7 @@ impl From for Error { fn from(value: anyhow::Error) -> Self { match value.downcast::() { Ok(err) => err, - Err(err) => Error::IOException(err.to_string()), + Err(err) => Error::IO(err.to_string()), } } } diff --git a/src/core/ir/eval.rs b/src/core/ir/eval.rs index 6bbf8871bd..c0d8c2356a 100644 --- a/src/core/ir/eval.rs +++ b/src/core/ir/eval.rs @@ -72,13 +72,10 @@ impl IR { if let Some(value) = map.get(&key) { Ok(ConstValue::String(value.to_owned())) } else { - Err(Error::ExprEvalError(format!( - "Can't find mapped key: {}.", - key - ))) + Err(Error::ExprEval(format!("Can't find mapped key: {}.", key))) } } else { - Err(Error::ExprEvalError( + Err(Error::ExprEval( "Mapped key must be string value.".to_owned(), )) } diff --git 
a/src/core/ir/eval_http.rs b/src/core/ir/eval_http.rs index bc99ef72a9..446eb9009a 100644 --- a/src/core/ir/eval_http.rs +++ b/src/core/ir/eval_http.rs @@ -236,7 +236,7 @@ pub fn parse_graphql_response( field_name: &str, ) -> Result { let res: async_graphql::Response = - from_value(res.body).map_err(|err| Error::DeserializeError(err.to_string()))?; + from_value(res.body).map_err(|err| Error::Deserialize(err.to_string()))?; for error in res.errors { ctx.add_error(error); diff --git a/src/core/mod.rs b/src/core/mod.rs index 5ca24f55e9..a885e5ab4f 100644 --- a/src/core/mod.rs +++ b/src/core/mod.rs @@ -12,6 +12,7 @@ pub mod data_loader; pub mod directive; pub mod document; pub mod endpoint; +mod errata; pub mod error; pub mod generator; pub mod graphql; @@ -47,6 +48,7 @@ use std::hash::Hash; use std::num::NonZeroU64; use async_graphql_value::ConstValue; +pub use errata::Errata; pub use error::{Error, Result}; use http::Response; use ir::model::IoId; diff --git a/src/main.rs b/src/main.rs index 3e912d28ad..b2dc98e001 100644 --- a/src/main.rs +++ b/src/main.rs @@ -3,8 +3,8 @@ use std::cell::Cell; -use tailcall::cli::CLIError; use tailcall::core::tracing::default_tracing_tailcall; +use tailcall::core::Errata; use tracing::subscriber::DefaultGuard; thread_local! { @@ -42,8 +42,8 @@ fn main() -> anyhow::Result<()> { match result { Ok(_) => {} Err(error) => { - // Ensure all errors are converted to CLIErrors before being printed. - let cli_error: CLIError = error.into(); + // Ensure all errors are converted to Errata before being printed. + let cli_error: Errata = error.into(); tracing::error!("{}", cli_error.color(true)); std::process::exit(exitcode::CONFIG); } From 6b51d4e5eaa4b92a65b5e5556da2380e0e88dbfd Mon Sep 17 00:00:00 2001 From: Sandipsinh Dilipsinh Rathod <62684960+ssddOnTop@users.noreply.github.com> Date: Tue, 27 Aug 2024 13:10:00 +0000 Subject: [PATCH 4/8] chore: convert cli tests to md (#2755) Co-authored-by: Tushar Mathur --- .../{gen_deezer.json => gen_deezer.md} | 2 + ...nfig.json => gen_json_proto_mix_config.md} | 4 +- ...laceholder.json => gen_jsonplaceholder.md} | 2 + tests/cli/gen.rs | 202 ++++++++++++++++-- tests/cli/parser.rs | 132 ++++++++++++ 5 files changed, 323 insertions(+), 19 deletions(-) rename tests/cli/fixtures/generator/{gen_deezer.json => gen_deezer.md} (98%) rename tests/cli/fixtures/generator/{gen_json_proto_mix_config.json => gen_json_proto_mix_config.md} (83%) rename tests/cli/fixtures/generator/{gen_jsonplaceholder.json => gen_jsonplaceholder.md} (98%) create mode 100644 tests/cli/parser.rs diff --git a/tests/cli/fixtures/generator/gen_deezer.json b/tests/cli/fixtures/generator/gen_deezer.md similarity index 98% rename from tests/cli/fixtures/generator/gen_deezer.json rename to tests/cli/fixtures/generator/gen_deezer.md index 42e2aafd1e..6fb4f47ac9 100644 --- a/tests/cli/fixtures/generator/gen_deezer.json +++ b/tests/cli/fixtures/generator/gen_deezer.md @@ -1,3 +1,4 @@ +```json @config { "inputs": [ { @@ -63,3 +64,4 @@ "query": "Query" } } +``` diff --git a/tests/cli/fixtures/generator/gen_json_proto_mix_config.json b/tests/cli/fixtures/generator/gen_json_proto_mix_config.md similarity index 83% rename from tests/cli/fixtures/generator/gen_json_proto_mix_config.json rename to tests/cli/fixtures/generator/gen_json_proto_mix_config.md index a521d34e29..1da85764b2 100644 --- a/tests/cli/fixtures/generator/gen_json_proto_mix_config.json +++ b/tests/cli/fixtures/generator/gen_json_proto_mix_config.md @@ -1,3 +1,4 @@ +```json @config { "inputs": [ { @@ -8,7 +9,7 @@ 
}, { "proto": { - "src": "../../../../../../tailcall-fixtures/fixtures/protobuf/news.proto" + "src": "tailcall-fixtures/fixtures/protobuf/news.proto" } } ], @@ -26,3 +27,4 @@ "query": "Query" } } +``` diff --git a/tests/cli/fixtures/generator/gen_jsonplaceholder.json b/tests/cli/fixtures/generator/gen_jsonplaceholder.md similarity index 98% rename from tests/cli/fixtures/generator/gen_jsonplaceholder.json rename to tests/cli/fixtures/generator/gen_jsonplaceholder.md index cc81a23152..2feead7b4f 100644 --- a/tests/cli/fixtures/generator/gen_jsonplaceholder.json +++ b/tests/cli/fixtures/generator/gen_jsonplaceholder.md @@ -1,3 +1,4 @@ +```json @config { "inputs": [ { @@ -81,3 +82,4 @@ "query": "Query" } } +``` diff --git a/tests/cli/gen.rs b/tests/cli/gen.rs index fa70fd4488..90955a5fa1 100644 --- a/tests/cli/gen.rs +++ b/tests/cli/gen.rs @@ -1,6 +1,166 @@ +mod parser; + +pub mod cacache_manager { + use std::io::{Read, Write}; + use std::path::PathBuf; + + use flate2::write::GzEncoder; + use flate2::Compression; + use http_cache_reqwest::{CacheManager, HttpResponse}; + use http_cache_semantics::CachePolicy; + use serde::{Deserialize, Serialize}; + + pub type BoxError = Box; + pub type Result = std::result::Result; + + pub struct CaCacheManager { + path: PathBuf, + } + + #[derive(Clone, Deserialize, Serialize)] + pub struct Store { + response: HttpResponse, + policy: CachePolicy, + } + + impl Default for CaCacheManager { + fn default() -> Self { + Self { path: PathBuf::from("./.cache") } + } + } + + #[async_trait::async_trait] + impl CacheManager for CaCacheManager { + async fn put( + &self, + cache_key: String, + response: HttpResponse, + policy: CachePolicy, + ) -> Result { + let data = Store { response: response.clone(), policy }; + let bytes = bincode::serialize(&data)?; + + let mut encoder = GzEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(&bytes)?; + let compressed_bytes = encoder.finish()?; + + cacache::write(&self.path, cache_key, compressed_bytes).await?; + Ok(response) + } + + async fn get(&self, cache_key: &str) -> Result> { + match cacache::read(&self.path, cache_key).await { + Ok(compressed_data) => { + let mut decoder = flate2::read::GzDecoder::new(compressed_data.as_slice()); + let mut serialized_data = Vec::new(); + decoder.read_to_end(&mut serialized_data)?; + let store: Store = bincode::deserialize(&serialized_data)?; + Ok(Some((store.response, store.policy))) + } + Err(_) => Ok(None), + } + } + + async fn delete(&self, cache_key: &str) -> Result<()> { + Ok(cacache::remove(&self.path, cache_key).await?) + } + } +} + +pub mod file { + use std::collections::HashMap; + use std::sync::Arc; + + use async_trait::async_trait; + use tailcall::core::FileIO; + use tokio::sync::RwLock; + + #[derive(Clone, Default)] + pub struct NativeFileTest(Arc>>); + #[async_trait] + impl FileIO for NativeFileTest { + async fn write<'a>(&'a self, path: &'a str, content: &'a [u8]) -> anyhow::Result<()> { + self.0.write().await.insert( + path.to_string(), + String::from_utf8_lossy(content).to_string(), + ); + Ok(()) + } + + async fn read<'a>(&'a self, path: &'a str) -> anyhow::Result { + let val = if let Some(val) = self.0.read().await.get(path).cloned() { + val + } else { + std::fs::read_to_string(path)? 
+ }; + Ok(val) + } + } +} + +pub mod http { + use anyhow::Result; + use http_cache_reqwest::{Cache, CacheMode, HttpCache, HttpCacheOptions}; + use hyper::body::Bytes; + use reqwest::Client; + use reqwest_middleware::{ClientBuilder, ClientWithMiddleware}; + use tailcall::core::http::Response; + use tailcall::core::HttpIO; + + use super::cacache_manager::CaCacheManager; + + #[derive(Clone)] + pub struct NativeHttpTest { + client: ClientWithMiddleware, + } + + impl Default for NativeHttpTest { + fn default() -> Self { + let mut client = ClientBuilder::new(Client::new()); + client = client.with(Cache(HttpCache { + mode: CacheMode::ForceCache, + manager: CaCacheManager::default(), + options: HttpCacheOptions::default(), + })); + Self { client: client.build() } + } + } + + #[async_trait::async_trait] + impl HttpIO for NativeHttpTest { + #[allow(clippy::blocks_in_conditions)] + async fn execute(&self, request: reqwest::Request) -> Result> { + let response = self.client.execute(request).await; + Ok(Response::from_reqwest( + response? + .error_for_status() + .map_err(|err| err.without_url())?, + ) + .await?) + } + } +} +pub mod env { + use std::borrow::Cow; + use std::collections::HashMap; + + use tailcall::core::EnvIO; + + #[derive(Clone)] + pub struct Env(pub HashMap); + + impl EnvIO for Env { + fn get(&self, key: &str) -> Option> { + self.0.get(key).map(Cow::from) + } + } +} + pub mod test { use std::path::Path; + use crate::parser::ExecutionSpec; + mod cacache_manager { use std::io::{Read, Write}; use std::path::PathBuf; @@ -120,25 +280,31 @@ pub mod test { use tailcall::core::config::{self, ConfigModule}; use tailcall::core::generator::Generator as ConfigGenerator; use tailcall::core::valid::{ValidateInto, Validator}; - use tokio::runtime::Runtime; use super::http::NativeHttpTest; + use crate::env::Env; + use crate::parser::{ExecutionSpec, IO}; - pub fn run_config_generator_spec(path: &Path) -> datatest_stable::Result<()> { - let path = path.to_path_buf(); - let runtime = Runtime::new().unwrap(); - runtime.block_on(async move { - run_test(&path.to_string_lossy()).await?; - Ok(()) - }) - } + pub async fn run_test(original_path: &Path, spec: ExecutionSpec) -> anyhow::Result<()> { + let snapshot_name = original_path.to_string_lossy().to_string(); + + let IO { fs, paths } = spec.configs.into_io().await; + let path = paths.first().unwrap().as_str(); - async fn run_test(path: &str) -> anyhow::Result<()> { let mut runtime = tailcall::cli::runtime::init(&Blueprint::default()); runtime.http = Arc::new(NativeHttpTest::default()); + runtime.file = Arc::new(fs); + if let Some(env) = spec.env { + runtime.env = Arc::new(Env(env)) + } let generator = Generator::new(path, runtime); let config = generator.read().await?; + if spec.debug_assert_config { + insta::assert_debug_snapshot!(snapshot_name, config); + return Ok(()); + } + let query_type = config.schema.query.clone().unwrap_or("Query".into()); let mutation_type_name = config.schema.mutation.clone(); let preset: config::transformer::Preset = config @@ -164,11 +330,11 @@ pub mod test { let config = ConfigModule::from(base_config); - insta::assert_snapshot!(path, config.to_sdl()); + insta::assert_snapshot!(snapshot_name, config.to_sdl()); Ok(()) } } - pub fn test_generator(path: &Path) -> datatest_stable::Result<()> { + async fn test_generator(path: &Path) -> datatest_stable::Result<()> { if let Some(extension) = path.extension() { if extension == "json" && path @@ -177,15 +343,15 @@ pub mod test { .map(|v| v.starts_with("gen")) .unwrap_or_default() { - let 
_ = generator_spec::run_config_generator_spec(path); + let spec = ExecutionSpec::from_source(path, std::fs::read_to_string(path)?)?; + generator_spec::run_test(path, spec).await?; } } Ok(()) } + pub fn run(path: &Path) -> datatest_stable::Result<()> { + tokio_test::block_on(test_generator(path)) + } } -datatest_stable::harness!( - test::test_generator, - "tests/cli/fixtures/generator", - r"^.*\.json" -); +datatest_stable::harness!(test::run, "tests/cli/fixtures/generator", r"^.*\.md"); diff --git a/tests/cli/parser.rs b/tests/cli/parser.rs new file mode 100644 index 0000000000..32d826f953 --- /dev/null +++ b/tests/cli/parser.rs @@ -0,0 +1,132 @@ +use std::collections::HashMap; +use std::path::Path; +use std::str::FromStr; + +use anyhow::anyhow; +use markdown::mdast::Node; +use markdown::ParseOptions; +use tailcall::core::config::Source; +use tailcall::core::FileIO; + +use crate::file::NativeFileTest; + +#[derive(Clone)] +pub struct ExecutionSpec { + pub env: Option>, + pub configs: ConfigHolder, + + // if this is set to true, + // then we will assert Config + // instead of asserting the generated config + pub debug_assert_config: bool, +} + +pub struct IO { + pub fs: NativeFileTest, + pub paths: Vec, +} + +#[derive(Clone)] +pub struct ConfigHolder { + configs: Vec<(Source, String)>, +} + +impl ConfigHolder { + pub async fn into_io(self) -> IO { + let fs = NativeFileTest::default(); + let mut paths = vec![]; + for (i, (source, content)) in self.configs.iter().enumerate() { + let path = format!("config{}.{}", i, source.ext()); + fs.write(&path, content.as_bytes()).await.unwrap(); + paths.push(path); + } + IO { fs, paths } + } +} + +impl ExecutionSpec { + pub fn from_source(path: &Path, contents: String) -> anyhow::Result { + let ast = markdown::to_mdast(&contents, &ParseOptions::default()).unwrap(); + let children = ast + .children() + .unwrap_or_else(|| panic!("Failed to parse {:?}: empty file unexpected", path)) + .iter() + .peekable(); + + let mut env = None; + let mut debug_assert_config = false; + let mut configs = vec![]; + + for node in children { + match node { + Node::Heading(heading) => { + if heading.depth == 2 { + if let Some(Node::Text(expect)) = heading.children.first() { + let split = expect.value.splitn(2, ':').collect::>(); + match split[..] 
{ + [a, b] => { + debug_assert_config = + a.contains("debug_assert") && b.ends_with("true"); + } + _ => { + return Err(anyhow!( + "Unexpected header annotation {:?} in {:?}", + expect.value, + path, + )) + } + } + } + } + } + Node::Code(code) => { + let (content, lang, meta) = { + ( + code.value.to_owned(), + code.lang.to_owned(), + code.meta.to_owned(), + ) + }; + if let Some(meta_str) = meta.as_ref().filter(|s| s.contains('@')) { + let temp_cleaned_meta = meta_str.replace('@', ""); + let name: &str = &temp_cleaned_meta; + + let lang = match lang { + Some(x) => Ok(x), + None => Err(anyhow!( + "Unexpected code block with no specific language in {:?}", + path + )), + }?; + let source = Source::from_str(&lang)?; + match name { + "config" => { + configs.push((source, content)); + } + "env" => { + let vars: HashMap = match source { + Source::Json => Ok(serde_json::from_str(&content)?), + Source::Yml => Ok(serde_yaml::from_str(&content)?), + _ => Err(anyhow!("Unexpected language in env block in {:?} (only JSON and YAML are supported)", path)), + }?; + + env = Some(vars); + } + _ => { + return Err(anyhow!( + "Unexpected component {:?} in {:?}: {:#?}", + name, + path, + meta + )); + } + } + } + } + _ => return Err(anyhow!("Unexpected node in {:?}: {:#?}", path, node)), + } + } + + Ok(Self { env, configs: ConfigHolder { configs }, debug_assert_config }) + } +} From 4ab32d4634dcc2be3a576f7b057058634b0bb3f3 Mon Sep 17 00:00:00 2001 From: neo773 <62795688+neo773@users.noreply.github.com> Date: Tue, 27 Aug 2024 20:11:25 +0530 Subject: [PATCH 5/8] chore: drop tag (#2761) --- tests/execution/grpc-map.md | 4 ++-- tests/execution/grpc-oneof.md | 34 +++++++++++++++++----------------- 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/tests/execution/grpc-map.md b/tests/execution/grpc-map.md index a116ef83a1..70049b195d 100644 --- a/tests/execution/grpc-map.md +++ b/tests/execution/grpc-map.md @@ -31,7 +31,7 @@ schema @server @upstream { query: Query } -input map__MapRequest @tag(id: "map.MapRequest") { +input map__MapRequest { map: JSON! } @@ -40,7 +40,7 @@ type Query { @grpc(body: "{{.args.mapRequest}}", method: "map.MapService.GetMap") } -type map__MapResponse @tag(id: "map.MapResponse") { +type map__MapResponse { map: JSON! } ``` diff --git a/tests/execution/grpc-oneof.md b/tests/execution/grpc-oneof.md index a407cc5a76..eb5857799e 100644 --- a/tests/execution/grpc-oneof.md +++ b/tests/execution/grpc-oneof.md @@ -46,58 +46,58 @@ schema query: Query } -input oneof__CommandInput @tag(id: "oneof.Command") { +input oneof__CommandInput { command: String } -input oneof__PayloadInput @tag(id: "oneof.Payload") { +input oneof__PayloadInput { payload: String } -input oneof__Request__Var0__Var @tag(id: "oneof.Request") { +input oneof__Request__Var0__Var { payload: oneof__PayloadInput! usual: String } -input oneof__Request__Var0__Var0 @tag(id: "oneof.Request") { +input oneof__Request__Var0__Var0 { flag: Boolean! payload: oneof__PayloadInput! usual: String } -input oneof__Request__Var0__Var1 @tag(id: "oneof.Request") { +input oneof__Request__Var0__Var1 { optPayload: oneof__PayloadInput! payload: oneof__PayloadInput! usual: String } -input oneof__Request__Var1__Var @tag(id: "oneof.Request") { +input oneof__Request__Var1__Var { command: oneof__CommandInput! usual: String } -input oneof__Request__Var1__Var0 @tag(id: "oneof.Request") { +input oneof__Request__Var1__Var0 { command: oneof__CommandInput! flag: Boolean! 
usual: String } -input oneof__Request__Var1__Var1 @tag(id: "oneof.Request") { +input oneof__Request__Var1__Var1 { command: oneof__CommandInput! optPayload: oneof__PayloadInput! usual: String } -input oneof__Request__Var__Var @tag(id: "oneof.Request") { +input oneof__Request__Var__Var { usual: String } -input oneof__Request__Var__Var0 @tag(id: "oneof.Request") { +input oneof__Request__Var__Var0 { flag: Boolean! usual: String } -input oneof__Request__Var__Var1 @tag(id: "oneof.Request") { +input oneof__Request__Var__Var1 { optPayload: oneof__PayloadInput! usual: String } @@ -125,29 +125,29 @@ type Query { @grpc(body: "{{.args.request}}", method: "oneof.OneOfService.GetOneOf") } -type oneof__Command @tag(id: "oneof.Command") { +type oneof__Command { command: String } -type oneof__Payload @tag(id: "oneof.Payload") { +type oneof__Payload { payload: String } -type oneof__Response__Var @tag(id: "oneof.Response") { +type oneof__Response__Var { usual: Int } -type oneof__Response__Var0 @tag(id: "oneof.Response") { +type oneof__Response__Var0 { payload: oneof__Payload! usual: Int } -type oneof__Response__Var1 @tag(id: "oneof.Response") { +type oneof__Response__Var1 { command: oneof__Command! usual: Int } -type oneof__Response__Var2 @tag(id: "oneof.Response") { +type oneof__Response__Var2 { response: String! usual: Int } From f6942bca7c1e067187041b31643413deac421948 Mon Sep 17 00:00:00 2001 From: Sahil Yeole <73148455+beelchester@users.noreply.github.com> Date: Tue, 27 Aug 2024 20:27:09 +0530 Subject: [PATCH 6/8] chore(2760): move impl Field from synth.rs to model.rs (#2763) Co-authored-by: Tushar Mathur --- src/core/jit/model.rs | 19 +++++++++++++++++++ src/core/jit/synth/synth.rs | 20 +------------------- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/src/core/jit/model.rs b/src/core/jit/model.rs index 2fe51567fa..bcee777ce4 100644 --- a/src/core/jit/model.rs +++ b/src/core/jit/model.rs @@ -10,6 +10,7 @@ use serde::{Deserialize, Serialize}; use super::Error; use crate::core::blueprint::Index; use crate::core::ir::model::IR; +use crate::core::json::JsonLike; #[derive(Debug, Deserialize, Clone)] pub struct Variables(HashMap); @@ -48,6 +49,24 @@ impl FromIterator<(String, V)> for Variables { } } +impl Field { + #[inline(always)] + pub fn skip<'json, Value: JsonLike<'json>>(&self, variables: &Variables) -> bool { + let eval = + |variable_option: Option<&Variable>, variables: &Variables, default: bool| { + variable_option + .map(|a| a.as_str()) + .and_then(|name| variables.get(name)) + .and_then(|value| value.as_bool()) + .unwrap_or(default) + }; + let skip = eval(self.skip.as_ref(), variables, false); + let include = eval(self.include.as_ref(), variables, true); + + skip == include + } +} + #[derive(Debug, Clone)] pub struct Arg { pub id: ArgId, diff --git a/src/core/jit/synth/synth.rs b/src/core/jit/synth/synth.rs index bcc0fb13e0..f319e97b0e 100644 --- a/src/core/jit/synth/synth.rs +++ b/src/core/jit/synth/synth.rs @@ -1,5 +1,5 @@ use crate::core::ir::TypedValue; -use crate::core::jit::model::{Field, Nested, OperationPlan, Variable, Variables}; +use crate::core::jit::model::{Field, Nested, OperationPlan, Variables}; use crate::core::jit::store::{Data, DataPath, Store}; use crate::core::jit::{Error, PathSegment, Positioned, ValidationError}; use crate::core::json::{JsonLike, JsonObjectLike}; @@ -13,24 +13,6 @@ pub struct Synth { variables: Variables, } -impl Field { - #[inline(always)] - pub fn skip<'json, Value: JsonLike<'json>>(&self, variables: &Variables) -> bool { - let eval = - 
|variable_option: Option<&Variable>, variables: &Variables, default: bool| { - variable_option - .map(|a| a.as_str()) - .and_then(|name| variables.get(name)) - .and_then(|value| value.as_bool()) - .unwrap_or(default) - }; - let skip = eval(self.skip.as_ref(), variables, false); - let include = eval(self.include.as_ref(), variables, true); - - skip == include - } -} - impl Synth { #[inline(always)] pub fn new( From 7be002f3f8177a07653df817b806c332efb999b9 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 27 Aug 2024 14:58:21 +0000 Subject: [PATCH 7/8] chore(deps): update dependency tsx to v4.19.0 --- npm/package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/npm/package-lock.json b/npm/package-lock.json index acba9bbde7..70ea8d98db 100644 --- a/npm/package-lock.json +++ b/npm/package-lock.json @@ -843,9 +843,9 @@ } }, "node_modules/tsx": { - "version": "4.18.0", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.18.0.tgz", - "integrity": "sha512-a1jaKBSVQkd6yEc1/NI7G6yHFfefIcuf3QJST7ZEyn4oQnxLYrZR5uZAM8UrwUa3Ge8suiZHcNS1gNrEvmobqg==", + "version": "4.19.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.19.0.tgz", + "integrity": "sha512-bV30kM7bsLZKZIOCHeMNVMJ32/LuJzLVajkQI/qf92J2Qr08ueLQvW00PUZGiuLPP760UINwupgUj8qrSCPUKg==", "dev": true, "license": "MIT", "dependencies": { From aeb1043704c745e432f9a3d34b5725f214cb2c68 Mon Sep 17 00:00:00 2001 From: Panagiotis Karatakis Date: Tue, 27 Aug 2024 23:10:25 +0300 Subject: [PATCH 8/8] fix(jit): raise validation error for required fields (#2754) Co-authored-by: Tushar Mathur --- src/core/blueprint/blueprint.rs | 1 + src/core/jit/synth/synth.rs | 38 +- .../snapshots/test-required-fields.md_0.snap | 18 + .../snapshots/test-required-fields.md_1.snap | 28 ++ .../snapshots/test-required-fields.md_10.snap | 24 ++ .../snapshots/test-required-fields.md_11.snap | 15 + .../snapshots/test-required-fields.md_12.snap | 29 ++ .../snapshots/test-required-fields.md_13.snap | 29 ++ .../snapshots/test-required-fields.md_14.snap | 24 ++ .../snapshots/test-required-fields.md_15.snap | 27 ++ .../snapshots/test-required-fields.md_16.snap | 29 ++ .../snapshots/test-required-fields.md_17.snap | 29 ++ .../snapshots/test-required-fields.md_18.snap | 24 ++ .../snapshots/test-required-fields.md_19.snap | 15 + .../snapshots/test-required-fields.md_2.snap | 27 ++ .../snapshots/test-required-fields.md_20.snap | 29 ++ .../snapshots/test-required-fields.md_21.snap | 29 ++ .../snapshots/test-required-fields.md_3.snap | 18 + .../snapshots/test-required-fields.md_4.snap | 28 ++ .../snapshots/test-required-fields.md_5.snap | 15 + .../snapshots/test-required-fields.md_6.snap | 24 ++ .../snapshots/test-required-fields.md_7.snap | 27 ++ .../snapshots/test-required-fields.md_8.snap | 29 ++ .../snapshots/test-required-fields.md_9.snap | 29 ++ .../test-required-fields.md_client.snap | 73 ++++ .../test-required-fields.md_merged.snap | 37 ++ tests/execution/test-required-fields.md | 358 ++++++++++++++++++ 27 files changed, 1042 insertions(+), 11 deletions(-) create mode 100644 tests/core/snapshots/test-required-fields.md_0.snap create mode 100644 tests/core/snapshots/test-required-fields.md_1.snap create mode 100644 tests/core/snapshots/test-required-fields.md_10.snap create mode 100644 tests/core/snapshots/test-required-fields.md_11.snap create mode 100644 tests/core/snapshots/test-required-fields.md_12.snap create mode 100644 tests/core/snapshots/test-required-fields.md_13.snap create mode 
100644 tests/core/snapshots/test-required-fields.md_14.snap create mode 100644 tests/core/snapshots/test-required-fields.md_15.snap create mode 100644 tests/core/snapshots/test-required-fields.md_16.snap create mode 100644 tests/core/snapshots/test-required-fields.md_17.snap create mode 100644 tests/core/snapshots/test-required-fields.md_18.snap create mode 100644 tests/core/snapshots/test-required-fields.md_19.snap create mode 100644 tests/core/snapshots/test-required-fields.md_2.snap create mode 100644 tests/core/snapshots/test-required-fields.md_20.snap create mode 100644 tests/core/snapshots/test-required-fields.md_21.snap create mode 100644 tests/core/snapshots/test-required-fields.md_3.snap create mode 100644 tests/core/snapshots/test-required-fields.md_4.snap create mode 100644 tests/core/snapshots/test-required-fields.md_5.snap create mode 100644 tests/core/snapshots/test-required-fields.md_6.snap create mode 100644 tests/core/snapshots/test-required-fields.md_7.snap create mode 100644 tests/core/snapshots/test-required-fields.md_8.snap create mode 100644 tests/core/snapshots/test-required-fields.md_9.snap create mode 100644 tests/core/snapshots/test-required-fields.md_client.snap create mode 100644 tests/core/snapshots/test-required-fields.md_merged.snap create mode 100644 tests/execution/test-required-fields.md diff --git a/src/core/blueprint/blueprint.rs b/src/core/blueprint/blueprint.rs index d5dd1a3d06..6fa14f5c67 100644 --- a/src/core/blueprint/blueprint.rs +++ b/src/core/blueprint/blueprint.rs @@ -77,6 +77,7 @@ impl Type { Type::ListType { non_null, .. } => *non_null, } } + /// checks if the type is a list pub fn is_list(&self) -> bool { matches!(self, Type::ListType { .. }) diff --git a/src/core/jit/synth/synth.rs b/src/core/jit/synth/synth.rs index f319e97b0e..387952bb30 100644 --- a/src/core/jit/synth/synth.rs +++ b/src/core/jit/synth/synth.rs @@ -42,6 +42,8 @@ where if !self.include(child) { continue; } + // TODO: in case of error set `child.output_name` to null + // and append error to response error array let val = self.iter(child, None, &DataPath::new())?; data.insert_key(&child.output_name, val); @@ -50,12 +52,6 @@ where Ok(Value::object(data)) } - /// checks if type_of is an array and value is an array - #[inline(always)] - fn is_array(type_of: &crate::core::blueprint::Type, value: &Value) -> bool { - type_of.is_list() == value.as_array().is_some() - } - #[inline(always)] fn iter( &'a self, @@ -80,8 +76,8 @@ where Data::Single(result) => { let value = result.as_ref().map_err(Clone::clone)?; - if !Self::is_array(&node.type_of, value) { - return Ok(Value::null()); + if node.type_of.is_list() != value.as_array().is_some() { + return self.node_nullable_guard(node); } self.iter_inner(node, value, data_path) } @@ -92,12 +88,26 @@ where } } None => match value { - Some(value) => self.iter_inner(node, value, data_path), - None => Ok(Value::null()), + Some(result) => self.iter_inner(node, result, data_path), + None => self.node_nullable_guard(node), }, } } + /// This guard ensures to return Null value only if node type permits it, in + /// case it does not it throws an Error + fn node_nullable_guard( + &'a self, + node: &'a Field, Value>, + ) -> Result> { + // according to GraphQL spec https://spec.graphql.org/October2021/#sec-Handling-Field-Errors + if node.type_of.is_nullable() { + Ok(Value::null()) + } else { + Err(ValidationError::ValueRequired.into()).map_err(|e| self.to_location_error(e, node)) + } + } + #[inline(always)] fn iter_inner( &'a self, @@ -105,12 +115,18 @@ 
where value: &'a Value, data_path: &DataPath, ) -> Result> { + // skip the field if field is not included in schema if !self.include(node) { return Ok(Value::null()); } let eval_result = if value.is_null() { - if node.type_of.is_nullable() { + // check the nullability of this type unwrapping list modifier + let is_nullable = match &node.type_of { + crate::core::blueprint::Type::NamedType { non_null, .. } => !*non_null, + crate::core::blueprint::Type::ListType { of_type, .. } => of_type.is_nullable(), + }; + if is_nullable { Ok(Value::null()) } else { Err(ValidationError::ValueRequired.into()) diff --git a/tests/core/snapshots/test-required-fields.md_0.snap b/tests/core/snapshots/test-required-fields.md_0.snap new file mode 100644 index 0000000000..c526e94e7a --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_0.snap @@ -0,0 +1,18 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "basicPresent": { + "id": 1, + "bar": "bar_1" + } + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_1.snap b/tests/core/snapshots/test-required-fields.md_1.snap new file mode 100644 index 0000000000..93309c9793 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_1.snap @@ -0,0 +1,28 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 32 + } + ], + "path": [ + "basicFieldMissing", + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_10.snap b/tests/core/snapshots/test-required-fields.md_10.snap new file mode 100644 index 0000000000..1621f25fbc --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_10.snap @@ -0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "innerPresent": [ + { + "id": 1, + "bar": "bar_1" + }, + { + "id": 2, + "bar": "bar_2" + } + ] + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_11.snap b/tests/core/snapshots/test-required-fields.md_11.snap new file mode 100644 index 0000000000..7cb54826dc --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_11.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "innerMissing": null + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_12.snap b/tests/core/snapshots/test-required-fields.md_12.snap new file mode 100644 index 0000000000..b5d4689146 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_12.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 32 + } + ], + "path": [ + "innerFieldMissing", + 1, + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_13.snap b/tests/core/snapshots/test-required-fields.md_13.snap new file mode 100644 index 0000000000..65e3a4af79 --- /dev/null +++ 
b/tests/core/snapshots/test-required-fields.md_13.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 29 + } + ], + "path": [ + "innerEntryMissing", + 1, + "id" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_14.snap b/tests/core/snapshots/test-required-fields.md_14.snap new file mode 100644 index 0000000000..c5e590a3a4 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_14.snap @@ -0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "outerPresent": [ + { + "id": 1, + "bar": "bar_1" + }, + { + "id": 2, + "bar": "bar_2" + } + ] + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_15.snap b/tests/core/snapshots/test-required-fields.md_15.snap new file mode 100644 index 0000000000..e96d3c87a0 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_15.snap @@ -0,0 +1,27 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 9 + } + ], + "path": [ + "outerMissing" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_16.snap b/tests/core/snapshots/test-required-fields.md_16.snap new file mode 100644 index 0000000000..f04b340dcd --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_16.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 32 + } + ], + "path": [ + "outerFieldMissing", + 1, + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_17.snap b/tests/core/snapshots/test-required-fields.md_17.snap new file mode 100644 index 0000000000..b6f7294e44 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_17.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 29 + } + ], + "path": [ + "outerEntryMissing", + 1, + "id" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_18.snap b/tests/core/snapshots/test-required-fields.md_18.snap new file mode 100644 index 0000000000..f3832a962b --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_18.snap @@ -0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "nonePresent": [ + { + "id": 1, + "bar": "bar_1" + }, + { + "id": 2, + "bar": "bar_2" + } + ] + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_19.snap b/tests/core/snapshots/test-required-fields.md_19.snap new file mode 100644 index 0000000000..52b8480df5 --- 
/dev/null +++ b/tests/core/snapshots/test-required-fields.md_19.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "noneMissing": null + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_2.snap b/tests/core/snapshots/test-required-fields.md_2.snap new file mode 100644 index 0000000000..7f404ec366 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_2.snap @@ -0,0 +1,27 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 9 + } + ], + "path": [ + "basicMissing" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_20.snap b/tests/core/snapshots/test-required-fields.md_20.snap new file mode 100644 index 0000000000..8ec7714e13 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_20.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 31 + } + ], + "path": [ + "noneFieldMissing", + 1, + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_21.snap b/tests/core/snapshots/test-required-fields.md_21.snap new file mode 100644 index 0000000000..0bee8203d9 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_21.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 28 + } + ], + "path": [ + "noneEntryMissing", + 1, + "id" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_3.snap b/tests/core/snapshots/test-required-fields.md_3.snap new file mode 100644 index 0000000000..635314f4a5 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_3.snap @@ -0,0 +1,18 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "relaxedPresent": { + "id": 1, + "bar": "bar_1" + } + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_4.snap b/tests/core/snapshots/test-required-fields.md_4.snap new file mode 100644 index 0000000000..39634a5fb0 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_4.snap @@ -0,0 +1,28 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 34 + } + ], + "path": [ + "relaxedFieldMissing", + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_5.snap b/tests/core/snapshots/test-required-fields.md_5.snap new file mode 100644 index 0000000000..9ecc8ec71f --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_5.snap @@ -0,0 +1,15 @@ +--- +source: tests/core/spec.rs 
+expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "relaxedMissing": null + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_6.snap b/tests/core/snapshots/test-required-fields.md_6.snap new file mode 100644 index 0000000000..2355e1e2bb --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_6.snap @@ -0,0 +1,24 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": { + "fullPresent": [ + { + "id": 1, + "bar": "bar_1" + }, + { + "id": 2, + "bar": "bar_2" + } + ] + } + } +} diff --git a/tests/core/snapshots/test-required-fields.md_7.snap b/tests/core/snapshots/test-required-fields.md_7.snap new file mode 100644 index 0000000000..79647f46ac --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_7.snap @@ -0,0 +1,27 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 9 + } + ], + "path": [ + "fullMissing" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_8.snap b/tests/core/snapshots/test-required-fields.md_8.snap new file mode 100644 index 0000000000..d0b7ef5c84 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_8.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 31 + } + ], + "path": [ + "fullFieldMissing", + 1, + "bar" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_9.snap b/tests/core/snapshots/test-required-fields.md_9.snap new file mode 100644 index 0000000000..392bb18994 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_9.snap @@ -0,0 +1,29 @@ +--- +source: tests/core/spec.rs +expression: response +--- +{ + "status": 200, + "headers": { + "content-type": "application/json" + }, + "body": { + "data": null, + "errors": [ + { + "message": "internal: non-null types require a return value", + "locations": [ + { + "line": 1, + "column": 28 + } + ], + "path": [ + "fullEntryMissing", + 1, + "id" + ] + } + ] + } +} diff --git a/tests/core/snapshots/test-required-fields.md_client.snap b/tests/core/snapshots/test-required-fields.md_client.snap new file mode 100644 index 0000000000..0d38a402d0 --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_client.snap @@ -0,0 +1,73 @@ +--- +source: tests/core/spec.rs +expression: formatted +--- +scalar Bytes + +scalar Date + +scalar DateTime + +scalar Email + +scalar Empty + +type Foo { + bar: String! + id: Int! +} + +scalar Int128 + +scalar Int16 + +scalar Int32 + +scalar Int64 + +scalar Int8 + +scalar JSON + +scalar PhoneNumber + +type Query { + basicFieldMissing: Foo! + basicMissing: Foo! + basicPresent: Foo! + fullEntryMissing: [Foo!]! + fullFieldMissing: [Foo!]! + fullMissing: [Foo!]! + fullPresent: [Foo!]! + innerEntryMissing: [Foo!] + innerFieldMissing: [Foo!] + innerMissing: [Foo!] + innerPresent: [Foo!] + noneEntryMissing: [Foo] + noneFieldMissing: [Foo] + noneMissing: [Foo] + nonePresent: [Foo] + outerEntryMissing: [Foo]! 
+ outerFieldMissing: [Foo]! + outerMissing: [Foo]! + outerPresent: [Foo]! + relaxedFieldMissing: Foo + relaxedMissing: Foo + relaxedPresent: Foo +} + +scalar UInt128 + +scalar UInt16 + +scalar UInt32 + +scalar UInt64 + +scalar UInt8 + +scalar Url + +schema { + query: Query +} diff --git a/tests/core/snapshots/test-required-fields.md_merged.snap b/tests/core/snapshots/test-required-fields.md_merged.snap new file mode 100644 index 0000000000..3a4bb400ff --- /dev/null +++ b/tests/core/snapshots/test-required-fields.md_merged.snap @@ -0,0 +1,37 @@ +--- +source: tests/core/spec.rs +expression: formatter +--- +schema @server @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Foo { + bar: String! + id: Int! +} + +type Query { + basicFieldMissing: Foo! @http(path: "/basic-field-missing") + basicMissing: Foo! @http(path: "/basic-missing") + basicPresent: Foo! @http(path: "/basic-present") + fullEntryMissing: [Foo!]! @http(path: "/full-entry-missing") + fullFieldMissing: [Foo!]! @http(path: "/full-field-missing") + fullMissing: [Foo!]! @http(path: "/full-missing") + fullPresent: [Foo!]! @http(path: "/full-present") + innerEntryMissing: [Foo!] @http(path: "/inner-entry-missing") + innerFieldMissing: [Foo!] @http(path: "/inner-field-missing") + innerMissing: [Foo!] @http(path: "/inner-missing") + innerPresent: [Foo!] @http(path: "/inner-present") + noneEntryMissing: [Foo] @http(path: "/none-entry-missing") + noneFieldMissing: [Foo] @http(path: "/none-field-missing") + noneMissing: [Foo] @http(path: "/none-missing") + nonePresent: [Foo] @http(path: "/none-present") + outerEntryMissing: [Foo]! @http(path: "/outer-entry-missing") + outerFieldMissing: [Foo]! @http(path: "/outer-field-missing") + outerMissing: [Foo]! @http(path: "/outer-missing") + outerPresent: [Foo]! @http(path: "/outer-present") + relaxedFieldMissing: Foo @http(path: "/relaxed-field-missing") + relaxedMissing: Foo @http(path: "/relaxed-missing") + relaxedPresent: Foo @http(path: "/relaxed-present") +} diff --git a/tests/execution/test-required-fields.md b/tests/execution/test-required-fields.md new file mode 100644 index 0000000000..115ca86075 --- /dev/null +++ b/tests/execution/test-required-fields.md @@ -0,0 +1,358 @@ +# Test API + +```graphql @config +schema @server @upstream(baseURL: "http://jsonplaceholder.typicode.com") { + query: Query +} + +type Query { + basicPresent: Foo! @http(path: "/basic-present") + basicFieldMissing: Foo! @http(path: "/basic-field-missing") + basicMissing: Foo! @http(path: "/basic-missing") + relaxedPresent: Foo @http(path: "/relaxed-present") + relaxedFieldMissing: Foo @http(path: "/relaxed-field-missing") + relaxedMissing: Foo @http(path: "/relaxed-missing") + fullPresent: [Foo!]! @http(path: "/full-present") + fullMissing: [Foo!]! @http(path: "/full-missing") + fullFieldMissing: [Foo!]! @http(path: "/full-field-missing") + fullEntryMissing: [Foo!]! @http(path: "/full-entry-missing") + innerPresent: [Foo!] @http(path: "/inner-present") + innerMissing: [Foo!] @http(path: "/inner-missing") + innerFieldMissing: [Foo!] @http(path: "/inner-field-missing") + innerEntryMissing: [Foo!] @http(path: "/inner-entry-missing") + outerPresent: [Foo]! @http(path: "/outer-present") + outerMissing: [Foo]! @http(path: "/outer-missing") + outerFieldMissing: [Foo]! @http(path: "/outer-field-missing") + outerEntryMissing: [Foo]! 
@http(path: "/outer-entry-missing") + nonePresent: [Foo] @http(path: "/none-present") + noneMissing: [Foo] @http(path: "/none-missing") + noneFieldMissing: [Foo] @http(path: "/none-field-missing") + noneEntryMissing: [Foo] @http(path: "/none-entry-missing") +} + +type Foo { + id: Int! + bar: String! +} +``` + +```yml @mock +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/basic-present + response: + status: 200 + body: + id: 1 + bar: bar_1 + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/basic-field-missing + response: + status: 200 + body: + id: 1 + bar: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/basic-missing + response: + status: 200 + body: null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/relaxed-present + response: + status: 200 + body: + id: 1 + bar: bar_1 + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/relaxed-field-missing + response: + status: 200 + body: + id: 1 + bar: null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/relaxed-missing + response: + status: 200 + body: null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/full-present + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: bar_2 + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/full-missing + response: + status: 200 + body: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/full-field-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/full-entry-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/inner-present + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: bar_2 + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/inner-missing + response: + status: 200 + body: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/inner-field-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/inner-entry-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/outer-present + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: bar_2 + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/outer-missing + response: + status: 200 + body: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/outer-field-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/outer-entry-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/none-present + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: bar_2 + +# this does not fail +- request: + method: GET + url: 
http://jsonplaceholder.typicode.com/none-missing + response: + status: 200 + body: null + +# this fails +- request: + method: GET + url: http://jsonplaceholder.typicode.com/none-field-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - id: 2 + bar: null + +# this does not fail +- request: + method: GET + url: http://jsonplaceholder.typicode.com/none-entry-missing + response: + status: 200 + body: + - id: 1 + bar: bar_1 + - null +``` + +```yml @test +- method: POST + url: http://localhost:8080/graphql + body: + query: query { basicPresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { basicFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { basicMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { relaxedPresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { relaxedFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { relaxedMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { fullPresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { fullMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { fullFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { fullEntryMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { innerPresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { innerMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { innerFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { innerEntryMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { outerPresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { outerMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { outerFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { outerEntryMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { nonePresent { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { noneMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { noneFieldMissing { id bar } } +- method: POST + url: http://localhost:8080/graphql + body: + query: query { noneEntryMissing { id bar } } +```