diff --git a/Cargo.lock b/Cargo.lock
index c0a7ce05af..8b422db473 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -218,9 +218,9 @@ dependencies = [
 
 [[package]]
 name = "apollo-parser"
-version = "0.8.2"
+version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9692c1bfa7e0628e5c46bd0538571dd469138a0f062290fd4bf90e20e46f05da"
+checksum = "b64257011a999f2e22275cf7a118f651e58dc9170e11b775d435de768fad0387"
 dependencies = [
  "memchr",
  "rowan",
diff --git a/Cargo.toml b/Cargo.toml
index e48b91c854..d8514547c6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -50,7 +50,7 @@ debug = 1
 # https://doc.rust-lang.org/cargo/reference/workspaces.html#the-dependencies-table
 [workspace.dependencies]
 apollo-compiler = "=1.0.0-beta.24"
-apollo-parser = "0.8.0"
+apollo-parser = "0.8.3"
 apollo-smith = "0.14.0"
 async-trait = "0.1.77"
 hex = { version = "0.4.3", features = ["serde"] }
diff --git a/apollo-router/src/apollo_studio_interop/mod.rs b/apollo-router/src/apollo_studio_interop/mod.rs
index 9aeaee61f6..4499a9b557 100644
--- a/apollo-router/src/apollo_studio_interop/mod.rs
+++ b/apollo-router/src/apollo_studio_interop/mod.rs
@@ -223,20 +223,17 @@ pub(crate) fn generate_extended_references(
 
 pub(crate) fn extract_enums_from_response(
     query: Arc<Query>,
-    operation_name: Option<&str>,
     schema: &Valid<Schema>,
     response_body: &Object,
     existing_refs: &mut ReferencedEnums,
 ) {
-    if let Some(operation) = query.operation(operation_name) {
-        extract_enums_from_selection_set(
-            &operation.selection_set,
-            &query.fragments,
-            schema,
-            response_body,
-            existing_refs,
-        );
-    }
+    extract_enums_from_selection_set(
+        &query.operation.selection_set,
+        &query.fragments,
+        schema,
+        response_body,
+        existing_refs,
+    );
 }
 
 fn add_enum_value_to_map(
diff --git a/apollo-router/src/apollo_studio_interop/tests.rs b/apollo-router/src/apollo_studio_interop/tests.rs
index 466ca301b0..754951983c 100644
--- a/apollo-router/src/apollo_studio_interop/tests.rs
+++ b/apollo-router/src/apollo_studio_interop/tests.rs
@@ -133,7 +133,6 @@ fn enums_from_response(
     let mut result = ReferencedEnums::new();
     extract_enums_from_response(
         Arc::new(query),
-        operation_name,
         schema.supergraph_schema(),
         &response_body,
         &mut result,
diff --git a/apollo-router/src/plugins/demand_control/mod.rs b/apollo-router/src/plugins/demand_control/mod.rs
index 62255fa832..5e3ba587f8 100644
--- a/apollo-router/src/plugins/demand_control/mod.rs
+++ b/apollo-router/src/plugins/demand_control/mod.rs
@@ -471,6 +471,7 @@ mod test {
     use apollo_compiler::ast;
     use apollo_compiler::validation::Valid;
     use apollo_compiler::ExecutableDocument;
+    use apollo_compiler::Schema;
     use futures::StreamExt;
     use schemars::JsonSchema;
     use serde::Deserialize;
@@ -482,7 +483,6 @@ mod test {
     use crate::plugins::demand_control::DemandControlContext;
     use crate::plugins::demand_control::DemandControlError;
     use crate::plugins::test::PluginTestHarness;
-    use crate::query_planner::fetch::QueryHash;
     use crate::services::execution;
     use crate::services::layers::query_analysis::ParsedDocument;
     use crate::services::layers::query_analysis::ParsedDocumentInner;
@@ -660,14 +660,14 @@ mod test {
     }
 
     fn context() -> Context {
-        let parsed_document = ParsedDocumentInner {
-            executable: Arc::new(Valid::assume_valid(ExecutableDocument::new())),
-            hash: Arc::new(QueryHash::default()),
-            ast: ast::Document::new(),
-        };
+        let schema = Schema::parse_and_validate("type Query { f: Int }", "").unwrap();
+        let ast = ast::Document::parse("{__typename}", "").unwrap();
+        let doc =
+            ast.to_executable_validate(&schema).unwrap();
+        let parsed_document =
+            ParsedDocumentInner::new(ast, doc.into(), None, Default::default()).unwrap();
 
         let ctx = Context::new();
         ctx.extensions()
-            .with_lock(|mut lock| lock.insert(ParsedDocument::new(parsed_document)));
+            .with_lock(|mut lock| lock.insert::<ParsedDocument>(parsed_document));
         ctx
     }
diff --git a/apollo-router/src/plugins/telemetry/mod.rs b/apollo-router/src/plugins/telemetry/mod.rs
index 1478261be5..eb3e9e85bf 100644
--- a/apollo-router/src/plugins/telemetry/mod.rs
+++ b/apollo-router/src/plugins/telemetry/mod.rs
@@ -698,30 +698,21 @@ impl Plugin for Telemetry {
     fn execution_service(&self, service: execution::BoxService) -> execution::BoxService {
         ServiceBuilder::new()
             .instrument(move |req: &ExecutionRequest| {
-                let operation_kind = req
-                    .query_plan
-                    .query
-                    .operation(req.supergraph_request.body().operation_name.as_deref())
-                    .map(|op| *op.kind());
+                let operation_kind = req.query_plan.query.operation.kind();
 
                 match operation_kind {
-                    Some(operation_kind) => match operation_kind {
-                        OperationKind::Subscription => info_span!(
-                            EXECUTION_SPAN_NAME,
-                            "otel.kind" = "INTERNAL",
-                            "graphql.operation.type" = operation_kind.as_apollo_operation_type(),
-                            "apollo_private.operation.subtype" =
-                                OperationSubType::SubscriptionRequest.as_str(),
-                        ),
-                        _ => info_span!(
-                            EXECUTION_SPAN_NAME,
-                            "otel.kind" = "INTERNAL",
-                            "graphql.operation.type" = operation_kind.as_apollo_operation_type(),
-                        ),
-                    },
-                    None => {
-                        info_span!(EXECUTION_SPAN_NAME, "otel.kind" = "INTERNAL",)
-                    }
+                    OperationKind::Subscription => info_span!(
+                        EXECUTION_SPAN_NAME,
+                        "otel.kind" = "INTERNAL",
+                        "graphql.operation.type" = operation_kind.as_apollo_operation_type(),
+                        "apollo_private.operation.subtype" =
+                            OperationSubType::SubscriptionRequest.as_str(),
+                    ),
+                    _ => info_span!(
+                        EXECUTION_SPAN_NAME,
+                        "otel.kind" = "INTERNAL",
+                        "graphql.operation.type" = operation_kind.as_apollo_operation_type(),
+                    ),
                 }
             })
             .service(service)
diff --git a/apollo-router/src/query_planner/bridge_query_planner.rs b/apollo-router/src/query_planner/bridge_query_planner.rs
index 9cab43aab8..31cc8436b2 100644
--- a/apollo-router/src/query_planner/bridge_query_planner.rs
+++ b/apollo-router/src/query_planner/bridge_query_planner.rs
@@ -508,19 +508,19 @@ impl BridgeQueryPlanner {
             operation_name,
         )?;
-        let (fragments, operations, defer_stats, schema_aware_hash) =
+        let (fragments, operation, defer_stats, schema_aware_hash) =
             Query::extract_query_information(&self.schema, executable, operation_name)?;
 
         let subselections = crate::spec::query::subselections::collect_subselections(
             &self.configuration,
-            &operations,
+            &operation,
             &fragments.map,
             &defer_stats,
         )?;
 
         Ok(Query {
             string: query,
             fragments,
-            operations,
+            operation,
             filtered_query: None,
             unauthorized: UnauthorizedPaths {
                 paths: vec![],
@@ -543,11 +543,9 @@ impl BridgeQueryPlanner {
             IntrospectionMode::Rust => {
                 let schema = self.schema.clone();
                 let response = Box::new(
-                    tokio::task::spawn_blocking(move || {
-                        Self::rust_introspection(&schema, &key, &doc)
-                    })
-                    .await
-                    .expect("Introspection panicked")?,
+                    tokio::task::spawn_blocking(move || Self::rust_introspection(&schema, &doc))
+                        .await
+                        .expect("Introspection panicked")?,
                 );
                 return Ok(QueryPlannerContent::Response { response });
             }
@@ -582,7 +580,7 @@ impl BridgeQueryPlanner {
             let schema = self.schema.clone();
             let js_result_clone = js_result.clone();
             tokio::task::spawn_blocking(move || {
-                let rust_result = match
Self::rust_introspection(&schema, &doc) { Ok(response) => { if response.errors.is_empty() { Ok(response) @@ -619,11 +617,10 @@ impl BridgeQueryPlanner { fn rust_introspection( schema: &Schema, - key: &QueryKey, doc: &ParsedDocument, ) -> Result { let schema = schema.api_schema(); - let operation = doc.get_operation(key.operation_name.as_deref())?; + let operation = &doc.operation; let variable_values = Default::default(); let variable_values = apollo_compiler::execution::coerce_variable_values(schema, operation, &variable_values) @@ -798,11 +795,12 @@ impl Service for BridgeQueryPlanner { operation_name.as_deref(), ) .map_err(|e| SpecError::QueryHashing(e.to_string()))?; - doc = Arc::new(ParsedDocumentInner { - executable: Arc::new(executable_document), - ast: modified_query, - hash: Arc::new(QueryHash(hash)), - }); + doc = ParsedDocumentInner::new( + modified_query, + Arc::new(executable_document), + operation_name.as_deref(), + Arc::new(QueryHash(hash)), + )?; context .extensions() .with_lock(|mut lock| lock.insert::(doc.clone())); @@ -880,10 +878,7 @@ impl BridgeQueryPlanner { ) .await?; - if selections - .operation(key.operation_name.as_deref()) - .is_some_and(|op| op.selection_set.is_empty()) - { + if selections.operation.selection_set.is_empty() { // All selections have @skip(true) or @include(false) // Return an empty response now to avoid dealing with an empty query plan later return Ok(QueryPlannerContent::Response { @@ -895,70 +890,46 @@ impl BridgeQueryPlanner { }); } - { - let operation = doc - .executable - .operations - .get(key.operation_name.as_deref()) - .ok(); - let mut has_root_typename = false; - let mut has_schema_introspection = false; - let mut has_other_root_fields = false; - if let Some(operation) = operation { - for field in operation.root_fields(&doc.executable) { - match field.name.as_str() { - "__typename" => has_root_typename = true, - "__schema" | "__type" if operation.is_query() => { - has_schema_introspection = true - } - _ => has_other_root_fields = true, - } - } - if has_root_typename && !has_schema_introspection && !has_other_root_fields { - // Fast path for __typename alone - if operation - .selection_set - .selections - .iter() - .all(|sel| sel.as_field().is_some_and(|f| f.name == "__typename")) - { - let root_type_name: serde_json_bytes::ByteString = - operation.object_type().as_str().into(); - let data = Value::Object( - operation - .root_fields(&doc.executable) - .filter(|field| field.name == "__typename") - .map(|field| { - ( - field.response_key().as_str().into(), - Value::String(root_type_name.clone()), - ) - }) - .collect(), - ); - return Ok(QueryPlannerContent::Response { - response: Box::new(graphql::Response::builder().data(data).build()), - }); - } else { - // fragments might use @include or @skip - } - } + if doc.has_root_typename && !doc.has_schema_introspection && !doc.has_explicit_root_fields { + // Fast path for __typename alone + if doc.operation.selection_set.selections.iter().all(|sel| { + sel.as_field() + .is_some_and(|f| f.name == "__typename" && f.directives.is_empty()) + }) { + let root_type_name: serde_json_bytes::ByteString = + doc.operation.object_type().as_str().into(); + let data = Value::Object( + doc.operation + .root_fields(&doc.executable) + .filter(|field| field.name == "__typename") + .map(|field| { + ( + field.response_key().as_str().into(), + Value::String(root_type_name.clone()), + ) + }) + .collect(), + ); + return Ok(QueryPlannerContent::Response { + response: 
Box::new(graphql::Response::builder().data(data).build()), + }); } else { - // Should be unreachable as QueryAnalysisLayer would have returned an error + // We have fragments which might use @skip or @include, + // or field directives which might be @skip or @include. } + } - if has_schema_introspection { - if has_other_root_fields { - let error = graphql::Error::builder() + if doc.has_schema_introspection { + if doc.has_explicit_root_fields { + let error = graphql::Error::builder() .message("Mixed queries with both schema introspection and concrete fields are not supported") .extension_code("MIXED_INTROSPECTION") .build(); - return Ok(QueryPlannerContent::Response { - response: Box::new(graphql::Response::builder().error(error).build()), - }); - } - return self.introspection(key, doc).await; + return Ok(QueryPlannerContent::Response { + response: Box::new(graphql::Response::builder().error(error).build()), + }); } + return self.introspection(key, doc).await; } let filter_res = if self.enable_authorization_directives { @@ -1001,11 +972,12 @@ impl BridgeQueryPlanner { key.operation_name.as_deref(), ) .map_err(|e| SpecError::QueryHashing(e.to_string()))?; - doc = Arc::new(ParsedDocumentInner { - executable: Arc::new(executable_document), - ast: new_doc, - hash: Arc::new(QueryHash(hash)), - }); + doc = ParsedDocumentInner::new( + new_doc, + Arc::new(executable_document), + key.operation_name.as_deref(), + Arc::new(QueryHash(hash)), + )?; selections.unauthorized.paths = unauthorized_paths; } @@ -1268,10 +1240,11 @@ mod tests { #[test(tokio::test)] async fn test_plan_error() { - let result = plan(EXAMPLE_SCHEMA, "", "", None, PlanOptions::default()).await; + let query = ""; + let result = plan(EXAMPLE_SCHEMA, query, query, None, PlanOptions::default()).await; assert_eq!( - "couldn't plan query: query validation errors: Syntax Error: Unexpected .", + "spec error: parsing error: syntax error: Unexpected .", result.unwrap_err().to_string() ); } @@ -1654,8 +1627,7 @@ mod tests { operation_name.as_deref(), &planner.schema(), &configuration, - ) - .unwrap(); + )?; planner .get( diff --git a/apollo-router/src/query_planner/caching_query_planner.rs b/apollo-router/src/query_planner/caching_query_planner.rs index b0a527584e..56d214225b 100644 --- a/apollo-router/src/query_planner/caching_query_planner.rs +++ b/apollo-router/src/query_planner/caching_query_planner.rs @@ -280,7 +280,7 @@ where } in all_cache_keys { let context = Context::new(); - let (doc, _operation_def) = match query_analysis + let doc = match query_analysis .parse_document(&query, operation_name.as_deref()) .await { @@ -324,7 +324,7 @@ where }) .await; if entry.is_first() { - let (doc, _operation_def) = match query_analysis + let doc = match query_analysis .parse_document(&query, operation_name.as_deref()) .await { diff --git a/apollo-router/src/query_planner/execution.rs b/apollo-router/src/query_planner/execution.rs index c6bd24e43e..888e8cb5d4 100644 --- a/apollo-router/src/query_planner/execution.rs +++ b/apollo-router/src/query_planner/execution.rs @@ -331,11 +331,6 @@ impl PlanNode { let v = parameters .query .variable_value( - parameters - .supergraph_request - .body() - .operation_name - .as_deref(), condition.as_str(), ¶meters.supergraph_request.body().variables, ) diff --git a/apollo-router/src/query_planner/plan.rs b/apollo-router/src/query_planner/plan.rs index 447adb7ba7..f0e5763358 100644 --- a/apollo-router/src/query_planner/plan.rs +++ b/apollo-router/src/query_planner/plan.rs @@ -77,25 +77,21 @@ impl QueryPlan { } impl 
QueryPlan { - pub(crate) fn is_deferred(&self, operation: Option<&str>, variables: &Object) -> bool { - self.root.is_deferred(operation, variables, &self.query) + pub(crate) fn is_deferred(&self, variables: &Object) -> bool { + self.root.is_deferred(variables, &self.query) } - pub(crate) fn is_subscription(&self, operation: Option<&str>) -> bool { - match self.query.operation(operation) { - Some(op) => matches!(op.kind(), OperationKind::Subscription), - None => false, - } + pub(crate) fn is_subscription(&self) -> bool { + matches!(self.query.operation.kind(), OperationKind::Subscription) } pub(crate) fn query_hashes( &self, batching_config: Batching, - operation: Option<&str>, variables: &Object, ) -> Result>, CacheResolverError> { self.root - .query_hashes(batching_config, operation, variables, &self.query) + .query_hashes(batching_config, variables, &self.query) } pub(crate) fn estimated_size(&self) -> usize { @@ -180,20 +176,11 @@ impl PlanNode { } } - pub(crate) fn is_deferred( - &self, - operation: Option<&str>, - variables: &Object, - query: &Query, - ) -> bool { + pub(crate) fn is_deferred(&self, variables: &Object, query: &Query) -> bool { match self { - Self::Sequence { nodes } => nodes - .iter() - .any(|n| n.is_deferred(operation, variables, query)), - Self::Parallel { nodes } => nodes - .iter() - .any(|n| n.is_deferred(operation, variables, query)), - Self::Flatten(node) => node.node.is_deferred(operation, variables, query), + Self::Sequence { nodes } => nodes.iter().any(|n| n.is_deferred(variables, query)), + Self::Parallel { nodes } => nodes.iter().any(|n| n.is_deferred(variables, query)), + Self::Flatten(node) => node.node.is_deferred(variables, query), Self::Fetch(..) => false, Self::Defer { .. } => true, Self::Subscription { .. } => false, @@ -203,19 +190,19 @@ impl PlanNode { condition, } => { if query - .variable_value(operation, condition.as_str(), variables) + .variable_value(condition.as_str(), variables) .map(|v| *v == Value::Bool(true)) .unwrap_or(true) { // right now ConditionNode is only used with defer, but it might be used // in the future to implement @skip and @include execution if let Some(node) = if_clause { - if node.is_deferred(operation, variables, query) { + if node.is_deferred(variables, query) { return true; } } } else if let Some(node) = else_clause { - if node.is_deferred(operation, variables, query) { + if node.is_deferred(variables, query) { return true; } } @@ -240,7 +227,6 @@ impl PlanNode { pub(crate) fn query_hashes( &self, batching_config: Batching, - operation: Option<&str>, variables: &Object, query: &Query, ) -> Result>, CacheResolverError> { @@ -286,7 +272,7 @@ impl PlanNode { condition, } => { if query - .variable_value(operation, condition.as_str(), variables) + .variable_value(condition.as_str(), variables) .map(|v| *v == Value::Bool(true)) .unwrap_or(true) { diff --git a/apollo-router/src/services/execution/service.rs b/apollo-router/src/services/execution/service.rs index 1abcb0c5d3..2869f1ade9 100644 --- a/apollo-router/src/services/execution/service.rs +++ b/apollo-router/src/services/execution/service.rs @@ -123,13 +123,10 @@ impl ExecutionService { let context = req.context; let ctx = context.clone(); let variables = req.supergraph_request.body().variables.clone(); - let operation_name = req.supergraph_request.body().operation_name.clone(); let (sender, receiver) = mpsc::channel(10); - let is_deferred = req - .query_plan - .is_deferred(operation_name.as_deref(), &variables); - let is_subscription = 
req.query_plan.is_subscription(operation_name.as_deref()); + let is_deferred = req.query_plan.is_deferred(&variables); + let is_subscription = req.query_plan.is_subscription(); let mut claims = None; if is_deferred { claims = context.get(APOLLO_AUTHENTICATION_JWT_CLAIMS).ok().flatten() @@ -240,7 +237,6 @@ impl ExecutionService { ready(execution_span.in_scope(|| { Self::process_graphql_response( &query, - operation_name.as_deref(), &variables, is_deferred, &schema, @@ -259,7 +255,6 @@ impl ExecutionService { #[allow(clippy::too_many_arguments)] fn process_graphql_response( query: &Arc, - operation_name: Option<&str>, variables: &Object, is_deferred: bool, schema: &Arc, @@ -295,7 +290,7 @@ impl ExecutionService { } let has_next = response.has_next.unwrap_or(true); - let variables_set = query.defer_variables_set(operation_name, variables); + let variables_set = query.defer_variables_set(variables); tracing::debug_span!("format_response").in_scope(|| { let mut paths = Vec::new(); @@ -332,7 +327,6 @@ impl ExecutionService { if let Some(filtered_query) = query.filtered_query.as_ref() { paths = filtered_query.format_response( &mut response, - operation_name, variables.clone(), schema.api_schema(), variables_set, @@ -343,7 +337,6 @@ impl ExecutionService { query .format_response( &mut response, - operation_name, variables.clone(), schema.api_schema(), variables_set, @@ -360,7 +353,6 @@ impl ExecutionService { if let (ApolloMetricsReferenceMode::Extended, Some(Value::Object(response_body))) = (metrics_ref_mode, &response.data) { extract_enums_from_response( query.clone(), - operation_name, schema.api_schema(), response_body, &mut referenced_enums, @@ -380,12 +372,9 @@ impl ExecutionService { response.errors.retain(|error| match &error.path { None => true, - Some(error_path) => query.contains_error_path( - operation_name, - &response.label, - error_path, - variables_set, - ), + Some(error_path) => { + query.contains_error_path(&response.label, error_path, variables_set) + } }); response.label = rewrite_defer_label(&response); @@ -433,7 +422,6 @@ impl ExecutionService { Self::split_incremental_response( query, - operation_name, has_next, variables_set, response, @@ -445,7 +433,6 @@ impl ExecutionService { fn split_incremental_response( query: &Arc, - operation_name: Option<&str>, has_next: bool, variables_set: BooleanValues, response: Response, @@ -464,12 +451,8 @@ impl ExecutionService { .filter(|error| match &error.path { None => false, Some(error_path) => { - query.contains_error_path( - operation_name, - &response.label, - error_path, - variables_set, - ) && error_path.starts_with(&path) + query.contains_error_path(&response.label, error_path, variables_set) + && error_path.starts_with(&path) } }) .cloned() diff --git a/apollo-router/src/services/layers/allow_only_http_post_mutations.rs b/apollo-router/src/services/layers/allow_only_http_post_mutations.rs index 16179f07bb..6c737ec76e 100644 --- a/apollo-router/src/services/layers/allow_only_http_post_mutations.rs +++ b/apollo-router/src/services/layers/allow_only_http_post_mutations.rs @@ -286,11 +286,10 @@ mod forbid_http_get_mutations_tests { let context = Context::new(); context.extensions().with_lock(|mut lock| { - lock.insert::(Arc::new(ParsedDocumentInner { - ast, - executable: Arc::new(executable), - hash: Default::default(), - })) + lock.insert::( + ParsedDocumentInner::new(ast, Arc::new(executable), None, Default::default()) + .unwrap(), + ) }); SupergraphRequest::fake_builder() diff --git 
a/apollo-router/src/services/layers/query_analysis.rs b/apollo-router/src/services/layers/query_analysis.rs index d4280df414..c288f7f96f 100644 --- a/apollo-router/src/services/layers/query_analysis.rs +++ b/apollo-router/src/services/layers/query_analysis.rs @@ -79,7 +79,7 @@ impl QueryAnalysisLayer { &self, query: &str, operation_name: Option<&str>, - ) -> Result<(ParsedDocument, Node), SpecError> { + ) -> Result { let query = query.to_string(); let operation_name = operation_name.map(|o| o.to_string()); let schema = self.schema.clone(); @@ -91,14 +91,12 @@ impl QueryAnalysisLayer { task::spawn_blocking(move || { span.in_scope(|| { - let doc = Query::parse_document( + Query::parse_document( &query, operation_name.as_deref(), schema.as_ref(), conf.as_ref(), - )?; - let operation = doc.get_operation(operation_name.as_deref())?.clone(); - Ok((doc, operation)) + ) }) }) .await @@ -161,7 +159,7 @@ impl QueryAnalysisLayer { ); Err(errors) } - Ok((doc, operation)) => { + Ok(doc) => { let context = Context::new(); if self.enable_authorization_directives { @@ -174,9 +172,9 @@ impl QueryAnalysisLayer { } context - .insert(OPERATION_NAME, operation.name.clone()) + .insert(OPERATION_NAME, doc.operation.name.clone()) .expect("cannot insert operation name into context; this is a bug"); - let operation_kind = OperationKind::from(operation.operation_type); + let operation_kind = OperationKind::from(doc.operation.operation_type); context .insert(OPERATION_KIND, operation_kind) .expect("cannot insert operation kind in the context; this is a bug"); @@ -257,25 +255,63 @@ pub(crate) struct ParsedDocumentInner { pub(crate) ast: ast::Document, pub(crate) executable: Arc>, pub(crate) hash: Arc, + pub(crate) operation: Node, + /// `__typename` + pub(crate) has_root_typename: bool, + /// `__schema` or `__type` + pub(crate) has_schema_introspection: bool, + /// Non-meta fields explicitly defined in the schema + pub(crate) has_explicit_root_fields: bool, } +#[derive(Debug)] +pub(crate) struct RootFieldKinds {} + impl ParsedDocumentInner { - pub(crate) fn get_operation( - &self, + pub(crate) fn new( + ast: ast::Document, + executable: Arc>, operation_name: Option<&str>, - ) -> Result<&Node, SpecError> { - if let Ok(operation) = self.executable.operations.get(operation_name) { - Ok(operation) - } else if let Some(name) = operation_name { - Err(SpecError::UnknownOperation(name.to_owned())) - } else if self.executable.operations.is_empty() { - // Maybe not reachable? 
- // A valid document is non-empty and has no unused fragments - Err(SpecError::NoOperation) - } else { - debug_assert!(self.executable.operations.len() > 1); - Err(SpecError::MultipleOperationWithoutOperationName) + hash: Arc, + ) -> Result, SpecError> { + let operation = get_operation(&executable, operation_name)?; + let mut has_root_typename = false; + let mut has_schema_introspection = false; + let mut has_explicit_root_fields = false; + for field in operation.root_fields(&executable) { + match field.name.as_str() { + "__typename" => has_root_typename = true, + "__schema" | "__type" if operation.is_query() => has_schema_introspection = true, + _ => has_explicit_root_fields = true, + } } + Ok(Arc::new(Self { + ast, + executable, + hash, + operation, + has_root_typename, + has_schema_introspection, + has_explicit_root_fields, + })) + } +} + +pub(crate) fn get_operation( + executable: &ExecutableDocument, + operation_name: Option<&str>, +) -> Result, SpecError> { + if let Ok(operation) = executable.operations.get(operation_name) { + Ok(operation.clone()) + } else if let Some(name) = operation_name { + Err(SpecError::UnknownOperation(name.to_owned())) + } else if executable.operations.is_empty() { + // Maybe not reachable? + // A valid document is non-empty and has no unused fragments + Err(SpecError::NoOperation) + } else { + debug_assert!(executable.operations.len() > 1); + Err(SpecError::MultipleOperationWithoutOperationName) } } diff --git a/apollo-router/src/services/supergraph/service.rs b/apollo-router/src/services/supergraph/service.rs index a77282d2db..9b1e2e35c8 100644 --- a/apollo-router/src/services/supergraph/service.rs +++ b/apollo-router/src/services/supergraph/service.rs @@ -233,9 +233,8 @@ async fn service_call( let _ = lock.insert::>(query_metrics); }); - let operation_name = body.operation_name.clone(); - let is_deferred = plan.is_deferred(operation_name.as_deref(), &variables); - let is_subscription = plan.is_subscription(operation_name.as_deref()); + let is_deferred = plan.is_deferred(&variables); + let is_subscription = plan.is_subscription(); if let Some(batching) = context .extensions() @@ -266,8 +265,7 @@ async fn service_call( .extensions() .with_lock(|lock| lock.get::().cloned()); if let Some(batch_query) = batch_query_opt { - let query_hashes = - plan.query_hashes(batching, operation_name.as_deref(), &variables)?; + let query_hashes = plan.query_hashes(batching, &variables)?; batch_query .set_query_hashes(query_hashes) .await diff --git a/apollo-router/src/spec/query.rs b/apollo-router/src/spec/query.rs index 21f2914035..67b1dc7b6d 100644 --- a/apollo-router/src/spec/query.rs +++ b/apollo-router/src/spec/query.rs @@ -32,6 +32,7 @@ use crate::json_ext::Value; use crate::plugins::authorization::UnauthorizedPaths; use crate::query_planner::fetch::OperationKind; use crate::query_planner::fetch::QueryHash; +use crate::services::layers::query_analysis::get_operation; use crate::services::layers::query_analysis::ParsedDocument; use crate::services::layers::query_analysis::ParsedDocumentInner; use crate::spec::schema::ApiSchema; @@ -58,7 +59,7 @@ pub(crate) struct Query { #[derivative(PartialEq = "ignore", Hash = "ignore")] pub(crate) fragments: Fragments, #[derivative(PartialEq = "ignore", Hash = "ignore")] - pub(crate) operations: Vec, + pub(crate) operation: Operation, #[derivative(PartialEq = "ignore", Hash = "ignore")] pub(crate) subselections: HashMap, #[derivative(PartialEq = "ignore", Hash = "ignore")] @@ -99,7 +100,7 @@ impl Query { fragments: Fragments { map: 
HashMap::new(), }, - operations: Vec::new(), + operation: Operation::empty(), subselections: HashMap::new(), unauthorized: UnauthorizedPaths::default(), filtered_query: None, @@ -121,14 +122,12 @@ impl Query { pub(crate) fn format_response( &self, response: &mut Response, - operation_name: Option<&str>, variables: Object, schema: &ApiSchema, defer_conditions: BooleanValues, ) -> Vec { let data = std::mem::take(&mut response.data); - let original_operation = self.operation(operation_name); match data { Some(Value::Object(mut input)) => { if self.is_deferred(defer_conditions) { @@ -148,13 +147,12 @@ impl Query { }; // Detect if root __typename is asked in the original query (the qp doesn't put root __typename in subselections) // cf https://github.com/apollographql/router/issues/1677 - let operation_kind_if_root_typename = - original_operation.and_then(|op| { - op.selection_set - .iter() - .any(|f| f.is_typename_field()) - .then(|| *op.kind()) - }); + let operation_kind_if_root_typename = self + .operation + .selection_set + .iter() + .any(|f| f.is_typename_field()) + .then(|| *self.operation.kind()); if let Some(operation_kind) = operation_kind_if_root_typename { output.insert(TYPENAME, operation_kind.default_type_name().into()); } @@ -186,13 +184,13 @@ impl Query { return vec![]; } } - } else if let Some(operation) = original_operation { - let mut output = Object::with_capacity(operation.selection_set.len()); + } else { + let mut output = Object::with_capacity(self.operation.selection_set.len()); - let all_variables = if operation.variables.is_empty() { + let all_variables = if self.operation.variables.is_empty() { variables } else { - operation + self.operation .variables .iter() .filter_map(|(k, Variable { default_value, .. })| { @@ -204,9 +202,9 @@ impl Query { }; let operation_type_name = schema - .root_operation(operation.kind.into()) + .root_operation(self.operation.kind.into()) .map(|name| name.as_str()) - .unwrap_or(operation.kind.default_type_name()); + .unwrap_or(self.operation.kind.default_type_name()); let mut parameters = FormatParameters { variables: &all_variables, schema, @@ -217,7 +215,7 @@ impl Query { response.data = Some( match self.apply_root_selection_set( operation_type_name, - &operation.selection_set, + &self.operation.selection_set, &mut parameters, &mut input, &mut output, @@ -234,19 +232,17 @@ impl Query { } return parameters.nullified; - } else { - failfast_debug!("can't find operation for {:?}", operation_name); } } Some(Value::Null) => { // Detect if root __typename is asked in the original query (the qp doesn't put root __typename in subselections) // cf https://github.com/apollographql/router/issues/1677 - let operation_kind_if_root_typename = original_operation.and_then(|op| { - op.selection_set - .iter() - .any(|f| f.is_typename_field()) - .then(|| *op.kind()) - }); + let operation_kind_if_root_typename = self + .operation + .selection_set + .iter() + .any(|f| f.is_typename_field()) + .then(|| *self.operation.kind()); response.data = match operation_kind_if_root_typename { Some(operation_kind) => { let mut output = Object::default(); @@ -304,11 +300,12 @@ impl Query { ) .map_err(|e| SpecError::QueryHashing(e.to_string()))?; - Ok(Arc::new(ParsedDocumentInner { + ParsedDocumentInner::new( ast, - executable: Arc::new(executable_document), - hash: Arc::new(QueryHash(hash)), - })) + Arc::new(executable_document), + operation_name, + Arc::new(QueryHash(hash)), + ) } #[cfg(test)] @@ -321,13 +318,13 @@ impl Query { let query = query.into(); let doc = 
Self::parse_document(&query, operation_name, schema, configuration)?; - let (fragments, operations, defer_stats, schema_aware_hash) = + let (fragments, operation, defer_stats, schema_aware_hash) = Self::extract_query_information(schema, &doc.executable, operation_name)?; Ok(Query { string: query, fragments, - operations, + operation, subselections: HashMap::new(), unauthorized: UnauthorizedPaths::default(), filtered_query: None, @@ -342,18 +339,15 @@ impl Query { schema: &Schema, document: &ExecutableDocument, operation_name: Option<&str>, - ) -> Result<(Fragments, Vec, DeferStats, Vec), SpecError> { + ) -> Result<(Fragments, Operation, DeferStats, Vec), SpecError> { let mut defer_stats = DeferStats { has_defer: false, has_unconditional_defer: false, conditional_defer_variable_names: IndexSet::default(), }; let fragments = Fragments::from_hir(document, schema, &mut defer_stats)?; - let operations = document - .operations - .iter() - .map(|operation| Operation::from_hir(operation, schema, &mut defer_stats, &fragments)) - .collect::, SpecError>>()?; + let operation = get_operation(document, operation_name)?; + let operation = Operation::from_hir(&operation, schema, &mut defer_stats, &fragments)?; let mut visitor = QueryHashVisitor::new(schema.supergraph_schema(), &schema.raw_sdl, document); @@ -362,7 +356,7 @@ impl Query { })?; let hash = visitor.finish(); - Ok((fragments, operations, defer_stats, hash)) + Ok((fragments, operation, defer_stats, hash)) } #[allow(clippy::too_many_arguments)] @@ -934,19 +928,13 @@ impl Query { request: &Request, schema: &Schema, ) -> Result<(), Response> { - let operation_name = request.operation_name.as_deref(); - let operation_variable_types = - self.operations - .iter() - .fold(HashMap::new(), |mut acc, operation| { - if operation_name.is_none() || operation.name.as_deref() == operation_name { - acc.extend(operation.variables.iter().map(|(k, v)| (k.as_str(), v))) - } - acc - }); - if LevelFilter::current() >= LevelFilter::DEBUG { - let known_variables = operation_variable_types.keys().cloned().collect(); + let known_variables = self + .operation + .variables + .keys() + .map(|k| k.as_str()) + .collect(); let provided_variables = request .variables .keys() @@ -963,7 +951,9 @@ impl Query { } } - let errors = operation_variable_types + let errors = self + .operation + .variables .iter() .filter_map( |( @@ -975,12 +965,12 @@ impl Query { )| { let value = request .variables - .get(*name) + .get(name.as_str()) .or(default_value.as_ref()) .unwrap_or(&Value::Null); ty.validate_input_value(value, schema).err().map(|_| { FetchError::ValidationInvalidTypeVariable { - name: name.to_string(), + name: name.as_str().to_string(), } .to_graphql_error(None) }) @@ -997,47 +987,23 @@ impl Query { pub(crate) fn variable_value<'a>( &'a self, - operation_name: Option<&str>, variable_name: &str, variables: &'a Object, ) -> Option<&'a Value> { variables .get(variable_name) - .or_else(|| self.default_variable_value(operation_name, variable_name)) - } - - pub(crate) fn default_variable_value( - &self, - operation_name: Option<&str>, - variable_name: &str, - ) -> Option<&Value> { - self.operation(operation_name).and_then(|op| { - op.variables - .get(variable_name) - .and_then(|Variable { default_value, .. 
}| default_value.as_ref()) - }) + .or_else(|| self.default_variable_value(variable_name)) } - pub(crate) fn operation(&self, operation_name: Option>) -> Option<&Operation> { - match operation_name { - Some(name) => self - .operations - .iter() - // we should have an error if the only operation is anonymous but the query specifies a name - .find(|op| { - if let Some(op_name) = op.name.as_deref() { - op_name == name.as_ref() - } else { - false - } - }), - None => self.operations.first(), - } + pub(crate) fn default_variable_value(&self, variable_name: &str) -> Option<&Value> { + self.operation + .variables + .get(variable_name) + .and_then(|Variable { default_value, .. }| default_value.as_ref()) } pub(crate) fn contains_error_path( &self, - operation_name: Option<&str>, label: &Option, path: &Path, defer_conditions: BooleanValues, @@ -1047,21 +1013,14 @@ impl Query { defer_conditions, }) { Some(subselection) => &subselection.selection_set, - None => match self.operation(operation_name) { - None => return false, - Some(op) => &op.selection_set, - }, + None => &self.operation.selection_set, }; selection_set .iter() .any(|selection| selection.contains_error_path(&path.0, &self.fragments)) } - pub(crate) fn defer_variables_set( - &self, - operation_name: Option<&str>, - variables: &Object, - ) -> BooleanValues { + pub(crate) fn defer_variables_set(&self, variables: &Object) -> BooleanValues { let mut bits = 0_u32; for (i, variable) in self .defer_stats @@ -1071,7 +1030,7 @@ impl Query { { let value = variables .get(variable.as_str()) - .or_else(|| self.default_variable_value(operation_name, variable)); + .or_else(|| self.default_variable_value(variable)); if matches!(value, Some(serde_json_bytes::Value::Bool(true))) { bits |= 1 << i; @@ -1110,6 +1069,16 @@ pub(crate) struct Variable { } impl Operation { + fn empty() -> Self { + Self { + name: None, + kind: OperationKind::Query, + type_name: "".into(), + selection_set: Vec::new(), + variables: HashMap::new(), + } + } + pub(crate) fn from_hir( operation: &executable::Operation, schema: &Schema, diff --git a/apollo-router/src/spec/query/subselections.rs b/apollo-router/src/spec/query/subselections.rs index d14146010e..2ec203144e 100644 --- a/apollo-router/src/spec/query/subselections.rs +++ b/apollo-router/src/spec/query/subselections.rs @@ -100,7 +100,7 @@ const MAX_DEFER_VARIABLES: usize = 4; pub(crate) fn collect_subselections( configuration: &Configuration, - operations: &[Operation], + operation: &Operation, fragments: &HashMap, defer_stats: &DeferStats, ) -> Result, SpecError> { @@ -122,29 +122,27 @@ pub(crate) fn collect_subselections( }; for defer_conditions in variable_combinations(defer_stats) { shared.defer_conditions = defer_conditions; - for operation in operations { - let type_name = operation.type_name.clone(); - let primary = collect_from_selection_set( - &mut shared, - // FIXME: use `ast::Name` everywhere so fallible conversion isn’t needed - #[allow(clippy::unwrap_used)] - &FieldType::new_named((&type_name).try_into().unwrap()), - &operation.selection_set, - ) - .map_err(|err| SpecError::TransformError(err.to_owned()))?; - debug_assert!(shared.path.is_empty()); - if !primary.is_empty() { - shared.subselections.insert( - SubSelectionKey { - defer_label: None, - defer_conditions, - }, - SubSelectionValue { - selection_set: primary, - type_name, - }, - ); - } + let type_name = operation.type_name.clone(); + let primary = collect_from_selection_set( + &mut shared, + // FIXME: use `ast::Name` everywhere so fallible conversion isn’t 
needed + #[allow(clippy::unwrap_used)] + &FieldType::new_named((&type_name).try_into().unwrap()), + &operation.selection_set, + ) + .map_err(|err| SpecError::TransformError(err.to_owned()))?; + debug_assert!(shared.path.is_empty()); + if !primary.is_empty() { + shared.subselections.insert( + SubSelectionKey { + defer_label: None, + defer_conditions, + }, + SubSelectionValue { + selection_set: primary, + type_name, + }, + ); } } Ok(shared.subselections) diff --git a/apollo-router/src/spec/query/tests.rs b/apollo-router/src/spec/query/tests.rs index bb13004402..1d97962405 100644 --- a/apollo-router/src/spec/query/tests.rs +++ b/apollo-router/src/spec/query/tests.rs @@ -131,7 +131,6 @@ impl FormatTest { query.format_response( &mut response, - self.operation, self.variables .unwrap_or_else(|| Value::Object(Object::default())) .as_object() @@ -3506,10 +3505,8 @@ fn it_statically_includes() { &Default::default(), ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 1); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 1); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("product")), _ => panic!("expected a field"), } @@ -3530,14 +3527,12 @@ fn it_statically_includes() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 2); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 2); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("review")), _ => panic!("expected a field"), } - match operation.selection_set.get(1).unwrap() { + match query.operation.selection_set.get(1).unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("product")), _ => panic!("expected a field"), } @@ -3561,10 +3556,8 @@ fn it_statically_includes() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 1); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 1); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, selection_set: Some(selection_set), @@ -3597,14 +3590,12 @@ fn it_statically_includes() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 2); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 2); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. 
} => assert_eq!(name, &ByteString::from("review")), _ => panic!("expected a field"), } - match operation.selection_set.get(1).unwrap() { + match query.operation.selection_set.get(1).unwrap() { Selection::Field { name, selection_set: Some(selection_set), @@ -3655,10 +3646,8 @@ fn it_statically_skips() { &Default::default(), ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 1); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 1); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("product")), _ => panic!("expected a field"), } @@ -3679,14 +3668,12 @@ fn it_statically_skips() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 2); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 2); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("review")), _ => panic!("expected a field"), } - match operation.selection_set.get(1).unwrap() { + match query.operation.selection_set.get(1).unwrap() { Selection::Field { name, .. } => assert_eq!(name, &ByteString::from("product")), _ => panic!("expected a field"), } @@ -3710,10 +3697,8 @@ fn it_statically_skips() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 1); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 1); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, selection_set: Some(selection_set), @@ -3746,14 +3731,12 @@ fn it_statically_skips() { ) .expect("could not parse query"); - assert_eq!(query.operations.len(), 1); - let operation = &query.operations[0]; - assert_eq!(operation.selection_set.len(), 2); - match operation.selection_set.first().unwrap() { + assert_eq!(query.operation.selection_set.len(), 2); + match query.operation.selection_set.first().unwrap() { Selection::Field { name, .. 
} => assert_eq!(name, &ByteString::from("review")), _ => panic!("expected a field"), } - match operation.selection_set.get(1).unwrap() { + match query.operation.selection_set.get(1).unwrap() { Selection::Field { name, selection_set: Some(selection_set), @@ -5132,7 +5115,6 @@ fn fragment_on_interface_on_query() { query.format_response( &mut response, - None, Default::default(), api_schema, BooleanValues { bits: 0 }, @@ -5666,7 +5648,6 @@ fn test_error_path_works_across_inline_fragments() { .unwrap(); assert!(query.contains_error_path( - None, &None, &Path::from("rootType/edges/0/node/subType/edges/0/node/myField"), BooleanValues { bits: 0 } @@ -5713,7 +5694,7 @@ fn test_query_not_named_query() { ) .unwrap(); let query = Query::parse("{ example }", None, &schema, &config).unwrap(); - let selection = &query.operations[0].selection_set[0]; + let selection = &query.operation.selection_set[0]; assert!( matches!( selection, @@ -5784,12 +5765,12 @@ fn filtered_defer_fragment() { .parse_ast(filtered_query, "filtered_query.graphql") .unwrap(); let doc = ast.to_executable(schema.supergraph_schema()).unwrap(); - let (fragments, operations, defer_stats, schema_aware_hash) = + let (fragments, operation, defer_stats, schema_aware_hash) = Query::extract_query_information(&schema, &doc, None).unwrap(); let subselections = crate::spec::query::subselections::collect_subselections( &config, - &operations, + &operation, &fragments.map, &defer_stats, ) @@ -5797,7 +5778,7 @@ fn filtered_defer_fragment() { let mut query = Query { string: query.to_string(), fragments, - operations, + operation, filtered_query: None, subselections, defer_stats, @@ -5810,12 +5791,12 @@ fn filtered_defer_fragment() { .parse_ast(filtered_query, "filtered_query.graphql") .unwrap(); let doc = ast.to_executable(schema.supergraph_schema()).unwrap(); - let (fragments, operations, defer_stats, schema_aware_hash) = + let (fragments, operation, defer_stats, schema_aware_hash) = Query::extract_query_information(&schema, &doc, None).unwrap(); let subselections = crate::spec::query::subselections::collect_subselections( &config, - &operations, + &operation, &fragments.map, &defer_stats, ) @@ -5824,7 +5805,7 @@ fn filtered_defer_fragment() { let filtered = Query { string: filtered_query.to_string(), fragments, - operations, + operation, filtered_query: None, subselections, defer_stats, @@ -5845,7 +5826,6 @@ fn filtered_defer_fragment() { query.filtered_query.as_ref().unwrap().format_response( &mut response, - None, Object::new(), schema.api_schema(), BooleanValues { bits: 0 }, @@ -5855,7 +5835,6 @@ fn filtered_defer_fragment() { query.format_response( &mut response, - None, Object::new(), schema.api_schema(), BooleanValues { bits: 0 }, diff --git a/apollo-router/tests/integration/operation_name.rs b/apollo-router/tests/integration/operation_name.rs index 1359b54398..6d2ef81226 100644 --- a/apollo-router/tests/integration/operation_name.rs +++ b/apollo-router/tests/integration/operation_name.rs @@ -14,9 +14,15 @@ async fn empty_document() { { "errors": [ { - "message": "Syntax Error: Unexpected .", + "message": "parsing error: syntax error: Unexpected .", + "locations": [ + { + "line": 1, + "column": 27 + } + ], "extensions": { - "code": "GRAPHQL_PARSE_FAILED" + "code": "PARSING_ERROR" } } ]
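
For reference, a minimal sketch (not part of the diff) of how crate-internal test code can build a `ParsedDocument` after this change. It assumes the `ParsedDocument` alias and `ParsedDocumentInner` from `services/layers/query_analysis.rs` are in scope; the function name and the schema/query strings are illustrative only, while the constructor shape mirrors the hunks in `demand_control/mod.rs` and `allow_only_http_post_mutations.rs` above.

```rust
use std::sync::Arc;

use apollo_compiler::ast;
use apollo_compiler::Schema;

// Hypothetical helper name; mirrors the test-construction pattern introduced by this PR.
fn parsed_document_for_tests() -> ParsedDocument {
    let schema = Schema::parse_and_validate("type Query { f: Int }", "").unwrap();
    let ast = ast::Document::parse("{ __typename }", "").unwrap();
    let executable = ast.to_executable_validate(&schema).unwrap();

    // The operation (here: the only, anonymous one) is resolved once at construction
    // time, together with the root-field flags used by the planner's fast paths.
    let doc = ParsedDocumentInner::new(ast, Arc::new(executable), None, Default::default())
        .expect("a document with a single anonymous operation always resolves");

    assert!(doc.has_root_typename);
    assert!(!doc.has_schema_introspection);
    assert!(!doc.has_explicit_root_fields);
    doc
}
```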