diff --git a/docs/design/asset-canister-interface.md b/docs/design/asset-canister-interface.md
index 130b97b276..a761376823 100644
--- a/docs/design/asset-canister-interface.md
+++ b/docs/design/asset-canister-interface.md
@@ -483,15 +483,14 @@ type SetAssetContentArguments = record {
   key: Key;
   content_encoding: text;
   chunk_ids: vec ChunkId;
-  asset_content: opt blob;
+  last_chunk: opt blob;
   sha256: opt blob;
 };
 ```
 
 This operation adds or changes a single content encoding for an asset. It also updates the modification time of the content encoding.
 
-The content of the encoding can be specified with either `chunk_ids` with previously uploaded data,
-or if `chunk_ids` contains no ids, then `asset_content` may contain the encoding's content directly.
-If both `chunk_ids` and `asset_content` contains data, then `asset_content` is ignored.
+The content of the encoding is assembled from the previously uploaded chunks referenced by `chunk_ids`.
+If `last_chunk` is not `null`, then its content is appended as the final chunk of the encoding.
 
 If `sha256` is not passed, the asset canister will compute the hash of the content.
diff --git a/src/canisters/frontend/ic-asset/src/batch_upload/operations.rs b/src/canisters/frontend/ic-asset/src/batch_upload/operations.rs
index e49e7780d7..76d5e03066 100644
--- a/src/canisters/frontend/ic-asset/src/batch_upload/operations.rs
+++ b/src/canisters/frontend/ic-asset/src/batch_upload/operations.rs
@@ -7,6 +7,7 @@ use crate::canister_api::types::batch_upload::common::{
     UnsetAssetContentArguments,
 };
 use crate::canister_api::types::batch_upload::v1::{BatchOperationKind, CommitBatchArguments};
+use crate::error::{AssembleCommitBatchArgumentError, SetEncodingError};
 use candid::Nat;
 use std::collections::HashMap;
 
@@ -20,7 +21,7 @@ pub(crate) async fn assemble_batch_operations(
     canister_assets: HashMap<String, AssetDetails>,
     asset_deletion_reason: AssetDeletionReason,
     canister_asset_properties: HashMap<String, AssetProperties>,
-) -> Result<Vec<BatchOperationKind>, String> {
+) -> Result<Vec<BatchOperationKind>, AssembleCommitBatchArgumentError> {
     let mut canister_assets = canister_assets;
 
     let mut operations = vec![];
@@ -33,7 +34,9 @@ pub(crate) async fn assemble_batch_operations(
     );
     create_new_assets(&mut operations, project_assets, &canister_assets);
     unset_obsolete_encodings(&mut operations, project_assets, &canister_assets);
-    set_encodings(&mut operations, chunk_uploader, project_assets).await?;
+    set_encodings(&mut operations, chunk_uploader, project_assets)
+        .await
+        .map_err(AssembleCommitBatchArgumentError::SetEncodingFailed)?;
     update_properties(&mut operations, project_assets, &canister_asset_properties);
 
     Ok(operations)
@@ -46,7 +49,7 @@ pub(crate) async fn assemble_commit_batch_arguments(
     asset_deletion_reason: AssetDeletionReason,
     canister_asset_properties: HashMap<String, AssetProperties>,
     batch_id: Nat,
-) -> Result<CommitBatchArguments, String> {
+) -> Result<CommitBatchArguments, AssembleCommitBatchArgumentError> {
     let operations = assemble_batch_operations(
         Some(chunk_uploader),
         &project_assets,
@@ -163,51 +166,29 @@ pub(crate) async fn set_encodings(
     operations: &mut Vec<BatchOperationKind>,
     chunk_uploader: Option<&ChunkUploader<'_>>,
     project_assets: &HashMap<String, ProjectAsset>,
-) -> Result<(), String> {
+) -> Result<(), SetEncodingError> {
     for (key, project_asset) in project_assets {
         for (content_encoding, v) in &project_asset.encodings {
             if v.already_in_place {
                 continue;
             }
-            if let Some(uploader) = chunk_uploader {
-                match uploader
-                    .uploader_ids_to_canister_chunk_ids(&v.uploader_chunk_ids)
-                    .await
-                {
-                    super::plumbing::UploaderIdMapping::Error(err) => return Err(err),
-                    super::plumbing::UploaderIdMapping::CanisterChunkIds(chunk_ids) => operations
-                        .push(BatchOperationKind::SetAssetContent(
-                            SetAssetContentArguments {
-                                key: key.clone(),
-                                content_encoding: content_encoding.clone(),
-                                chunk_ids,
-                                asset_content: None,
-                                sha256: Some(v.sha256.clone()),
-                            },
-                        )),
-                    super::plumbing::UploaderIdMapping::IncludeChunksDirectly(asset_content) => {
-                        operations.push(BatchOperationKind::SetAssetContent(
-                            SetAssetContentArguments {
-                                key: key.clone(),
-                                content_encoding: content_encoding.clone(),
-                                chunk_ids: vec![],
-                                asset_content: Some(asset_content.concat()),
-                                sha256: Some(v.sha256.clone()),
-                            },
-                        ))
-                    }
+            let (chunk_ids, last_chunk) = match chunk_uploader {
+                Some(uploader) => {
+                    uploader
+                        .uploader_ids_to_canister_chunk_ids(&v.uploader_chunk_ids)
+                        .await?
                 }
-            } else {
-                operations.push(BatchOperationKind::SetAssetContent(
-                    SetAssetContentArguments {
-                        key: key.clone(),
-                        content_encoding: content_encoding.clone(),
-                        chunk_ids: vec![],
-                        asset_content: None,
-                        sha256: Some(v.sha256.clone()),
-                    },
-                ));
+                None => (vec![], None),
             };
+            operations.push(BatchOperationKind::SetAssetContent(
+                SetAssetContentArguments {
+                    key: key.clone(),
+                    content_encoding: content_encoding.clone(),
+                    chunk_ids,
+                    last_chunk,
+                    sha256: Some(v.sha256.clone()),
+                },
+            ));
         }
     }
     Ok(())
diff --git a/src/canisters/frontend/ic-asset/src/batch_upload/plumbing.rs b/src/canisters/frontend/ic-asset/src/batch_upload/plumbing.rs
index 05a0fdf5b3..fee8136752 100644
--- a/src/canisters/frontend/ic-asset/src/batch_upload/plumbing.rs
+++ b/src/canisters/frontend/ic-asset/src/batch_upload/plumbing.rs
@@ -9,6 +9,7 @@ use crate::error::CreateChunkError;
 use crate::error::CreateEncodingError;
 use crate::error::CreateEncodingError::EncodeContentFailed;
 use crate::error::CreateProjectAssetError;
+use crate::error::SetEncodingError;
 use candid::Nat;
 use futures::future::try_join_all;
 use futures::TryFutureExt;
@@ -49,22 +50,6 @@ pub(crate) struct ProjectAsset {
     pub(crate) encodings: HashMap<String, ProjectAssetEncoding>,
 }
 
-enum UploaderState {
-    Uploading,
-    /// Uploader has uploaded chunks - commit will reference chunk ids to specify asset content
-    FinalizedWithUploads,
-    /// Uploader has not uploaded chunks - commit will contain asset content directly
-    FinalizedWithoutUploads,
-}
-
-pub(crate) enum UploaderIdMapping {
-    Error(String),
-    /// Chunks are uploaded to the canister with these ids
-    CanisterChunkIds(Vec<Nat>),
-    /// Chunks are not uploaded and should be included in the SetAssetContent operations directly
-    IncludeChunksDirectly(Vec<Vec<u8>>),
-}
-
 type IdMapping = BTreeMap<usize, Nat>;
 type UploadQueue = Vec<(usize, Vec<u8>)>;
 pub(crate) struct ChunkUploader<'agent> {
@@ -76,7 +61,6 @@ pub(crate) struct ChunkUploader<'agent> {
     // maps uploader_chunk_id to canister_chunk_id
     id_mapping: Arc<Mutex<IdMapping>>,
     upload_queue: Arc<Mutex<UploadQueue>>,
-    uploader_state: Arc<Mutex<UploaderState>>,
 }
 
 impl<'agent> ChunkUploader<'agent> {
@@ -89,7 +73,6 @@ impl<'agent> ChunkUploader<'agent> {
             bytes: Arc::new(AtomicUsize::new(0)),
             id_mapping: Arc::new(Mutex::new(BTreeMap::new())),
             upload_queue: Arc::new(Mutex::new(vec![])),
-            uploader_state: Arc::new(Mutex::new(UploaderState::Uploading)),
         }
     }
 
@@ -127,24 +110,8 @@ impl<'agent> ChunkUploader<'agent> {
         &self,
         semaphores: &Semaphores,
     ) -> Result<(), CreateChunkError> {
-        let queue = self.upload_queue.lock().await;
-        let mut uploader_state = self.uploader_state.lock().await;
-
-        // Can skip upload if every chunk submitted for uploading is still in the queue.
-        // Additionally, chunks in the queue are small enough that there is plenty of space in the commit message to include all of them.
-        let skip_upload = queue.len() == self.chunks.fetch_add(0, Ordering::SeqCst)
-            && queue.iter().map(|(_, chunk)| chunk.len()).sum::<usize>() < MAX_CHUNK_SIZE / 2;
-        drop(queue);
-        // Potential for further improvement: unconditional upload_chunks(MAX_CHUNK_SIZE / 2, usize::MAX, semaphores)
-        // Then allow mix of uploaded chunks and asset content that is part of the commit args.
-
-        if skip_upload {
-            *uploader_state = UploaderState::FinalizedWithoutUploads;
-        } else {
-            self.upload_chunks(0, 0, semaphores).await?;
-            *uploader_state = UploaderState::FinalizedWithUploads;
-        }
-        Ok(())
+        self.upload_chunks(MAX_CHUNK_SIZE / 2, usize::MAX, semaphores)
+            .await
     }
 
     pub(crate) fn bytes(&self) -> usize {
@@ -154,48 +121,33 @@ impl<'agent> ChunkUploader<'agent> {
         self.chunks.load(Ordering::SeqCst)
     }
 
-    /// Call only after `finalize_upload` has completed
+    /// Call only after `finalize_upload` has completed.
+    /// Returns `(chunk_ids, Option<last_chunk>)`
    pub(crate) async fn uploader_ids_to_canister_chunk_ids(
         &self,
         uploader_ids: &[usize],
-    ) -> UploaderIdMapping {
-        let uploader_state = self.uploader_state.lock().await;
-        match *uploader_state {
-            UploaderState::Uploading => UploaderIdMapping::Error(
-                "Bug: Tried to map uploader ids to canister ids before finalizing".to_string(),
-            ),
-            UploaderState::FinalizedWithUploads => {
-                let mapping = self.id_mapping.lock().await;
-                let ids = uploader_ids
-                    .iter()
-                    .map(|id| {
-                        mapping
-                            .get(id)
-                            .expect("Chunk uploader did not upload all chunks but is not aware of it. This is a bug.")
-                            .clone()
-                    })
-                    .collect();
-                UploaderIdMapping::CanisterChunkIds(ids)
-            }
-            UploaderState::FinalizedWithoutUploads => {
-                let queue = self.upload_queue.lock().await;
-                match uploader_ids
+    ) -> Result<(Vec<Nat>, Option<Vec<u8>>), SetEncodingError> {
+        let mut chunk_ids = vec![];
+        let mut last_chunk: Option<Vec<u8>> = None;
+        let mapping = self.id_mapping.lock().await;
+        let queue = self.upload_queue.lock().await;
+        for uploader_id in uploader_ids {
+            if let Some(item) = mapping.get(uploader_id) {
+                chunk_ids.push(item.clone());
+            } else if let Some(last_chunk_data) =
+                queue
                     .iter()
-                    .map(|uploader_id| {
-                        queue.iter().find_map(|(id, content)| {
-                            if id == uploader_id {
-                                Some(content.clone())
-                            } else {
-                                None
-                            }
-                        }).ok_or_else(|| format!("Chunk uploader does not have a chunk with uploader id {uploader_id}. This is a bug."))
-                    })
-                    .collect() {
-                        Ok(asset_content) => UploaderIdMapping::IncludeChunksDirectly(asset_content),
-                        Err(err) => UploaderIdMapping::Error(err)
-                    }
+                .find_map(|(id, data)| if id == uploader_id { Some(data) } else { None })
+            {
+                match last_chunk.as_mut() {
+                    Some(existing_data) => existing_data.extend(last_chunk_data.iter()),
+                    None => last_chunk = Some(last_chunk_data.clone()),
+                }
+            } else {
+                return Err(SetEncodingError::UnknownUploaderChunkId(*uploader_id));
             }
         }
+        Ok((chunk_ids, last_chunk))
     }
 
     async fn add_to_upload_queue(&self, uploader_chunk_id: usize, contents: &[u8]) {
diff --git a/src/canisters/frontend/ic-asset/src/canister_api/types/batch_upload/common.rs b/src/canisters/frontend/ic-asset/src/canister_api/types/batch_upload/common.rs
index 657f440971..8b7d4e59b6 100644
--- a/src/canisters/frontend/ic-asset/src/canister_api/types/batch_upload/common.rs
+++ b/src/canisters/frontend/ic-asset/src/canister_api/types/batch_upload/common.rs
@@ -77,10 +77,10 @@ pub struct SetAssetContentArguments {
     pub key: String,
     /// The content encoding for which this content applies
     pub content_encoding: String,
-    /// The chunks to assign to this content
+    /// The chunks to assign to this content encoding
     pub chunk_ids: Vec<Nat>,
-    /// If no chunks are assigned to this asset, then `asset_content` is the asset content
-    pub asset_content: Option<Vec<u8>>,
+    /// If present, appended after the chunks referenced by `chunk_ids` as the final chunk
+    pub last_chunk: Option<Vec<u8>>,
     /// The sha256 of the entire content
     pub sha256: Option<Vec<u8>>,
 }
diff --git a/src/canisters/frontend/ic-asset/src/error/compute_evidence.rs b/src/canisters/frontend/ic-asset/src/error/compute_evidence.rs
index 92beddf70e..4c127d1af1 100644
--- a/src/canisters/frontend/ic-asset/src/error/compute_evidence.rs
+++ b/src/canisters/frontend/ic-asset/src/error/compute_evidence.rs
@@ -5,9 +5,15 @@ use crate::error::hash_content::HashContentError;
 use ic_agent::AgentError;
 use thiserror::Error;
 
+use super::AssembleCommitBatchArgumentError;
+
 /// Errors related to computing evidence for a proposed update.
 #[derive(Error, Debug)]
 pub enum ComputeEvidenceError {
+    /// Failed when assembling commit_batch argument.
+    #[error(transparent)]
+    AssembleCommitBatchArgumentFailed(#[from] AssembleCommitBatchArgumentError),
+
     /// Failed when inspecting assets to be updated.
     #[error(transparent)]
     ProcessProjectAsset(#[from] CreateProjectAssetError),
@@ -20,10 +26,6 @@ pub enum ComputeEvidenceError {
     #[error(transparent)]
     GetAssetProperties(#[from] GetAssetPropertiesError),
 
-    /// Failed when assembling commit_batch argument.
-    #[error("Failed to assemble commit_batch argument: {0}")]
-    AssembleCommitBatchArgumentError(String),
-
     /// Failed when computing hashes of asset content.
     #[error(transparent)]
     HashContent(#[from] HashContentError),
diff --git a/src/canisters/frontend/ic-asset/src/error/mod.rs b/src/canisters/frontend/ic-asset/src/error/mod.rs
index 6162a8e0d1..bd2ac50c35 100644
--- a/src/canisters/frontend/ic-asset/src/error/mod.rs
+++ b/src/canisters/frontend/ic-asset/src/error/mod.rs
@@ -1,5 +1,6 @@
 //! Error types
 
+mod assemble_commit_batch_argument;
 mod compatibility;
 mod compute_evidence;
 mod create_chunk;
@@ -13,10 +14,12 @@ mod hash_content;
 mod load_config;
 mod load_rule;
 mod prepare_sync_for_proposal;
+mod set_encoding;
 mod sync;
 mod upload;
 mod upload_content;
 
+pub use assemble_commit_batch_argument::AssembleCommitBatchArgumentError;
 pub use compatibility::CompatibilityError;
 pub use compute_evidence::ComputeEvidenceError;
 pub use create_chunk::CreateChunkError;
@@ -30,6 +33,7 @@ pub use hash_content::HashContentError;
 pub use load_config::AssetLoadConfigError;
 pub use load_rule::LoadRuleError;
 pub use prepare_sync_for_proposal::PrepareSyncForProposalError;
+pub use set_encoding::SetEncodingError;
 pub use sync::SyncError;
 pub use upload::UploadError;
 pub use upload_content::UploadContentError;
diff --git a/src/canisters/frontend/ic-asset/src/error/upload.rs b/src/canisters/frontend/ic-asset/src/error/upload.rs
index ce4b4fe13d..d427d15680 100644
--- a/src/canisters/frontend/ic-asset/src/error/upload.rs
+++ b/src/canisters/frontend/ic-asset/src/error/upload.rs
@@ -1,3 +1,4 @@
+use super::AssembleCommitBatchArgumentError;
 use crate::error::compatibility::CompatibilityError;
 use crate::error::create_project_asset::CreateProjectAssetError;
 use ic_agent::AgentError;
@@ -20,7 +21,7 @@ pub enum UploadError {
 
     /// Failed when assembling commit_batch argument.
     #[error("Failed to assemble commit_batch argument: {0}")]
-    AssembleCommitBatchArgumentError(String),
+    AssembleCommitBatchArgumentFailed(#[from] AssembleCommitBatchArgumentError),
 
     /// Failed when creating project assets.
     #[error("Failed to create project asset: {0}")]
diff --git a/src/canisters/frontend/ic-asset/src/error/upload_content.rs b/src/canisters/frontend/ic-asset/src/error/upload_content.rs
index 7fc72d021e..0af8864062 100644
--- a/src/canisters/frontend/ic-asset/src/error/upload_content.rs
+++ b/src/canisters/frontend/ic-asset/src/error/upload_content.rs
@@ -4,9 +4,15 @@ use crate::error::get_asset_properties::GetAssetPropertiesError;
 use ic_agent::AgentError;
 use thiserror::Error;
 
+use super::AssembleCommitBatchArgumentError;
+
 /// Errors related to uploading content to the asset canister.
 #[derive(Error, Debug)]
 pub enum UploadContentError {
+    /// Failed when assembling commit_batch argument.
+    #[error("Failed to assemble commit_batch argument: {0}")]
+    AssembleCommitBatchArgumentFailed(AssembleCommitBatchArgumentError),
+
     /// Failed when calling create_batch.
     #[error("Failed to create batch: {0}")]
     CreateBatchFailed(AgentError),
@@ -15,10 +21,6 @@ pub enum UploadContentError {
     #[error("Failed to create project asset: {0}")]
     CreateProjectAssetError(#[from] CreateProjectAssetError),
 
-    /// Failed when assembling commit_batch argument.
-    #[error("Failed to assemble commit_batch argument: {0}")]
-    AssembleCommitBatchArgumentError(String),
-
     /// Failed when building list of assets to synchronize.
#[error("Failed to gather asset descriptors: {0}")] GatherAssetDescriptorsFailed(#[from] GatherAssetDescriptorsError), diff --git a/src/canisters/frontend/ic-asset/src/evidence/mod.rs b/src/canisters/frontend/ic-asset/src/evidence/mod.rs index 263e418130..7d73dbbf1d 100644 --- a/src/canisters/frontend/ic-asset/src/evidence/mod.rs +++ b/src/canisters/frontend/ic-asset/src/evidence/mod.rs @@ -66,7 +66,7 @@ pub async fn compute_evidence( canister_asset_properties, ) .await - .map_err(ComputeEvidenceError::AssembleCommitBatchArgumentError)?; + .map_err(ComputeEvidenceError::AssembleCommitBatchArgumentFailed)?; operations.sort(); let mut sha = Sha256::new(); diff --git a/src/canisters/frontend/ic-asset/src/sync.rs b/src/canisters/frontend/ic-asset/src/sync.rs index d26171611f..c5513f1bb4 100644 --- a/src/canisters/frontend/ic-asset/src/sync.rs +++ b/src/canisters/frontend/ic-asset/src/sync.rs @@ -99,7 +99,7 @@ pub async fn upload_content_and_assemble_sync_operations( batch_id, ) .await - .map_err(UploadContentError::AssembleCommitBatchArgumentError)?; + .map_err(UploadContentError::AssembleCommitBatchArgumentFailed)?; // -v debug!( diff --git a/src/canisters/frontend/ic-asset/src/upload.rs b/src/canisters/frontend/ic-asset/src/upload.rs index 64f2905c91..7aafee1b2e 100644 --- a/src/canisters/frontend/ic-asset/src/upload.rs +++ b/src/canisters/frontend/ic-asset/src/upload.rs @@ -12,8 +12,7 @@ use crate::canister_api::methods::{ }; use crate::canister_api::types::batch_upload::v0; use crate::error::CompatibilityError::DowngradeV1TOV0Failed; -use crate::error::UploadError; -use crate::error::UploadError::{CommitBatchFailed, CreateBatchFailed, ListAssetsFailed}; +use crate::error::UploadError::{self, CommitBatchFailed, CreateBatchFailed, ListAssetsFailed}; use ic_utils::Canister; use slog::{info, Logger}; use std::collections::HashMap; @@ -63,7 +62,7 @@ pub async fn upload( batch_id, ) .await - .map_err(UploadError::AssembleCommitBatchArgumentError)?; + .map_err(UploadError::AssembleCommitBatchArgumentFailed)?; let canister_api_version = api_version(canister).await; info!(logger, "Committing batch."); diff --git a/src/canisters/frontend/ic-certified-assets/src/evidence.rs b/src/canisters/frontend/ic-certified-assets/src/evidence.rs index 43f18f5737..c62b504dcb 100644 --- a/src/canisters/frontend/ic-certified-assets/src/evidence.rs +++ b/src/canisters/frontend/ic-certified-assets/src/evidence.rs @@ -143,7 +143,7 @@ fn next_chunk_index( hasher, }; } - } else if let Some(chunk_content) = sac.asset_content.as_ref() { + } else if let Some(chunk_content) = sac.last_chunk.as_ref() { hash_chunk_by_content(&mut hasher, chunk_content); } } diff --git a/src/canisters/frontend/ic-certified-assets/src/state_machine.rs b/src/canisters/frontend/ic-certified-assets/src/state_machine.rs index 31596a7651..226a2c1ade 100644 --- a/src/canisters/frontend/ic-certified-assets/src/state_machine.rs +++ b/src/canisters/frontend/ic-certified-assets/src/state_machine.rs @@ -383,10 +383,8 @@ impl State { arg: SetAssetContentArguments, now: u64, ) -> Result<(), String> { - if arg.chunk_ids.is_empty() && arg.asset_content.is_none() { - return Err( - "encoding must have at least one chunk or contain asset_content".to_string(), - ); + if arg.chunk_ids.is_empty() && arg.last_chunk.is_none() { + return Err("encoding must have at least one chunk or contain last_chunk".to_string()); } let dependent_keys = self.dependent_keys(&arg.key); @@ -403,7 +401,8 @@ impl State { let chunk = self.chunks.remove(chunk_id).expect("chunk not found"); 
                 content_chunks.push(chunk.content);
             }
-        } else if let Some(encoding_content) = arg.asset_content {
+        }
+        if let Some(encoding_content) = arg.last_chunk {
             content_chunks.push(encoding_content.into());
         }
 
@@ -662,11 +661,14 @@ impl State {
         Ok(())
     }
 
-    fn commit_batch_check_limits(&self, arg: &CommitBatchArguments) -> Result<(), String> {
-        let (new_chunk_amount, new_byte_amount) = fold(
+    /// Computes the chunk and byte counts contributed by the `last_chunk` fields,
+    /// as required by `self.check_batch_limits`.
+    fn compute_last_chunk_data(&self, arg: &CommitBatchArguments) -> (usize, usize) {
+        fold(
             arg.operations.iter().map(|op| {
                 if let BatchOperation::SetAssetContent(SetAssetContentArguments {
-                    asset_content: Some(content),
+                    last_chunk: Some(content),
+                    // Chunks defined in `chunk_ids` are already accounted for and can be ignored here
                     ..
                 }) = op
                 {
@@ -676,18 +678,20 @@ impl State {
                 }
             }),
             (0, 0),
-            |(chunk_count, byte_count), asset_len| {
+            |(chunks_added, bytes_added), asset_len| {
                 if let Some(len) = asset_len {
-                    (chunk_count + 1, byte_count + len)
+                    (chunks_added + 1, bytes_added + len)
                 } else {
-                    (chunk_count, byte_count)
+                    (chunks_added, bytes_added)
                 }
             },
-        );
-        self.check_batch_limits(new_chunk_amount, new_byte_amount)
+        )
     }
 
     pub fn commit_batch(&mut self, arg: CommitBatchArguments, now: u64) -> Result<(), String> {
-        self.commit_batch_check_limits(&arg)?;
+        let (chunks_added, bytes_added) = self.compute_last_chunk_data(&arg);
+        self.check_batch_limits(chunks_added, bytes_added)?;
+
+        let batch_id = arg.batch_id;
         for op in arg.operations {
             match op {
diff --git a/src/canisters/frontend/ic-certified-assets/src/tests.rs b/src/canisters/frontend/ic-certified-assets/src/tests.rs
index 60a11357f6..c2a3904c76 100644
--- a/src/canisters/frontend/ic-certified-assets/src/tests.rs
+++ b/src/canisters/frontend/ic-certified-assets/src/tests.rs
@@ -297,7 +297,7 @@ fn assemble_create_assets_and_set_contents_operations(
             key: asset.name.clone(),
             content_encoding: enc,
             chunk_ids,
-            asset_content: None,
+            last_chunk: None,
             sha256: None,
         }
     }));
@@ -1995,7 +1995,7 @@ mod evidence_computation {
             key: "/a/b/c".to_string(),
             content_encoding: "identity".to_string(),
             chunk_ids: vec![chunk_1],
-            asset_content: None,
+            last_chunk: None,
             sha256: None,
         };
         let cba = CommitBatchArguments {
@@ -2061,7 +2061,7 @@ mod evidence_computation {
             key: "/a/b/c".to_string(),
             content_encoding: "identity".to_string(),
             chunk_ids: vec![chunk_1, chunk_2],
-            asset_content: None,
+            last_chunk: None,
             sha256: None,
         };
         let cba = CommitBatchArguments {
@@ -2138,7 +2138,7 @@ mod evidence_computation {
             key: "/a/b/c".to_string(),
             content_encoding: "identity".to_string(),
             chunk_ids: vec![],
-            asset_content: None,
+            last_chunk: None,
             sha256: None,
         };
         let cba = CommitBatchArguments {
@@ -2761,7 +2761,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2783,7 +2783,7 @@ mod evidence_computation {
                 key: "/2".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2812,7 +2812,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2834,7 +2834,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "gzip".to_string(),
                 chunk_ids: vec![],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2875,7 +2875,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![chunk_1],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2906,7 +2906,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![chunk_2],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -2958,7 +2958,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![chunk_1, chunk_2],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -3000,7 +3000,7 @@ mod evidence_computation {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
                 chunk_ids: vec![chunk_1, chunk_2],
-                asset_content: None,
+                last_chunk: None,
                 sha256: None,
             })],
         })
@@ -3032,7 +3032,7 @@ mod evidence_computation {
             operations: vec![SetAssetContent(SetAssetContentArguments {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
-                asset_content: None,
+                last_chunk: None,
                 chunk_ids: vec![],
                 sha256: Some(sha256_1),
             })],
@@ -3054,7 +3054,7 @@ mod evidence_computation {
             operations: vec![SetAssetContent(SetAssetContentArguments {
                 key: "/1".to_string(),
                 content_encoding: "identity".to_string(),
-                asset_content: None,
+                last_chunk: None,
                 chunk_ids: vec![],
                 sha256: Some(sha256_2),
             })],
diff --git a/src/canisters/frontend/ic-certified-assets/src/types.rs b/src/canisters/frontend/ic-certified-assets/src/types.rs
index 72606a3019..a164dc12c5 100644
--- a/src/canisters/frontend/ic-certified-assets/src/types.rs
+++ b/src/canisters/frontend/ic-certified-assets/src/types.rs
@@ -39,8 +39,8 @@ pub struct SetAssetContentArguments {
     pub key: AssetKey,
     pub content_encoding: String,
     pub chunk_ids: Vec<ChunkId>,
-    /// Ignored unless `chunk_ids` is empty.
-    pub asset_content: Option<ByteBuf>,
+    /// If set, appended after the chunks referenced by `chunk_ids` as the final chunk.
+    pub last_chunk: Option<ByteBuf>,
     pub sha256: Option<ByteBuf>,
 }
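
The resolution rule at the heart of the new `uploader_ids_to_canister_chunk_ids` can be exercised in isolation. The sketch below is illustrative only: `Tables` and `resolve` are hypothetical stand-ins for the uploader's internals (plain `u128` ids instead of `candid::Nat`, no `Arc<Mutex<...>>` wrappers), but the logic mirrors the patch: ids found in the id mapping become canister chunk ids, ids still sitting in the local upload queue are concatenated in order into a single `last_chunk`, and anything else is an `UnknownUploaderChunkId` error.

```rust
use std::collections::BTreeMap;

/// Hypothetical stand-in for the uploader's two lookup tables.
struct Tables {
    /// uploader_chunk_id -> canister chunk id, for chunks that were uploaded
    id_mapping: BTreeMap<usize, u128>,
    /// (uploader_chunk_id, content) pairs for chunks still held locally
    upload_queue: Vec<(usize, Vec<u8>)>,
}

#[derive(Debug, PartialEq)]
enum SetEncodingError {
    UnknownUploaderChunkId(usize),
}

/// Uploaded ids resolve to canister chunk ids; queued ids are folded,
/// in encoding order, into one trailing `last_chunk`.
fn resolve(
    tables: &Tables,
    uploader_ids: &[usize],
) -> Result<(Vec<u128>, Option<Vec<u8>>), SetEncodingError> {
    let mut chunk_ids = vec![];
    let mut last_chunk: Option<Vec<u8>> = None;
    for id in uploader_ids {
        if let Some(canister_id) = tables.id_mapping.get(id) {
            chunk_ids.push(*canister_id);
        } else if let Some(data) = tables
            .upload_queue
            .iter()
            .find_map(|(qid, data)| (qid == id).then_some(data))
        {
            last_chunk.get_or_insert_with(Vec::new).extend_from_slice(data);
        } else {
            return Err(SetEncodingError::UnknownUploaderChunkId(*id));
        }
    }
    Ok((chunk_ids, last_chunk))
}

fn main() {
    let tables = Tables {
        id_mapping: BTreeMap::from([(0, 7)]),
        upload_queue: vec![(1, b"hello ".to_vec()), (2, b"world".to_vec())],
    };
    // Chunk 0 was uploaded; chunks 1 and 2 ride along as one `last_chunk`.
    assert_eq!(
        resolve(&tables, &[0, 1, 2]),
        Ok((vec![7], Some(b"hello world".to_vec())))
    );
}
```

Since `finalize_upload` now uploads queued chunks until at most `MAX_CHUNK_SIZE / 2` bytes remain local, the concatenated `last_chunk` stays small enough to travel inside the commit message.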
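On the canister side, `set_asset_content` no longer treats inline content as mutually exclusive with `chunk_ids`: the referenced chunks are drained from the batch first and `last_chunk` is simply appended after them. A minimal sketch of that assembly order, assuming a plain `HashMap` chunk store and `u128` ids in place of the canister's real state:

```rust
use std::collections::HashMap;

/// Sketch of the assembly order in `set_asset_content` (the real code keys
/// chunks by `Nat` and wraps contents in reference-counted byte buffers).
fn assemble_content(
    uploaded_chunks: &mut HashMap<u128, Vec<u8>>,
    chunk_ids: &[u128],
    last_chunk: Option<Vec<u8>>,
) -> Result<Vec<Vec<u8>>, String> {
    if chunk_ids.is_empty() && last_chunk.is_none() {
        return Err("encoding must have at least one chunk or contain last_chunk".to_string());
    }
    let mut content_chunks = vec![];
    for chunk_id in chunk_ids {
        // Chunks are consumed from the batch as they are assigned to the encoding.
        let chunk = uploaded_chunks
            .remove(chunk_id)
            .ok_or_else(|| "chunk not found".to_string())?;
        content_chunks.push(chunk);
    }
    // `last_chunk`, when present, always lands at the end.
    if let Some(content) = last_chunk {
        content_chunks.push(content);
    }
    Ok(content_chunks)
}
```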
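Commit-time limit checking follows the same reasoning: bytes referenced through `chunk_ids` were already counted against the batch limits when `create_chunk` stored them, so only the `last_chunk` payloads add new usage at commit time. A sketch of the accounting performed by `compute_last_chunk_data`, again with simplified stand-in types:

```rust
/// Simplified operation shape (the real types live in
/// ic-certified-assets/src/types.rs).
enum BatchOperation {
    SetAssetContent { last_chunk: Option<Vec<u8>> },
    Other,
}

/// Every `last_chunk` costs one chunk slot plus its length in bytes;
/// operations without one add nothing new.
fn last_chunk_usage(operations: &[BatchOperation]) -> (usize, usize) {
    operations
        .iter()
        .fold((0, 0), |(chunks_added, bytes_added), op| match op {
            BatchOperation::SetAssetContent {
                last_chunk: Some(content),
            } => (chunks_added + 1, bytes_added + content.len()),
            _ => (chunks_added, bytes_added),
        })
}

fn main() {
    let ops = [
        BatchOperation::SetAssetContent {
            last_chunk: Some(vec![0u8; 100]),
        },
        BatchOperation::SetAssetContent { last_chunk: None },
        BatchOperation::Other,
    ];
    // One trailing chunk carrying 100 bytes must fit within the batch limits.
    assert_eq!(last_chunk_usage(&ops), (1, 100));
}
```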