From 5428f20a6a75bb6ca9d642ec7e12611f67f89904 Mon Sep 17 00:00:00 2001
From: Ingvar Stepanyan
Date: Mon, 15 Jan 2024 14:13:19 +0000
Subject: [PATCH] Documentation fixes

---
 src/api/autogen/AlpacaDeviceAPI_v1.yaml |  60 ++++------
 src/api/autogen/index.ts                |   7 +-
 src/api/mod.rs                          | 142 ++++++++++++------
 src/lib.rs                              |  10 +-
 src/macros.rs                           |  11 +-
 5 files changed, 111 insertions(+), 119 deletions(-)

diff --git a/src/api/autogen/AlpacaDeviceAPI_v1.yaml b/src/api/autogen/AlpacaDeviceAPI_v1.yaml
index 195f68e..08dfb62 100644
--- a/src/api/autogen/AlpacaDeviceAPI_v1.yaml
+++ b/src/api/autogen/AlpacaDeviceAPI_v1.yaml
@@ -72,16 +72,14 @@ paths:
       description: >-
         Actions and SupportedActions are a standardised means for drivers to extend functionality beyond the built-in capabilities of the ASCOM device interfaces.
-
         The key advantage of using Actions is that drivers can expose any device specific functionality required. The downside is that, in order to use these unique features, every application author would need to create bespoke code to present or exploit them.
-
         The Action parameter and return strings are deceptively simple, but can support transmission of arbitrarily complex data structures, for example through JSON encoding.

-        This capability will be of primary value to
-        * bespoke software and hardware configurations where a single entity controls both the consuming application software and the hardware / driver environment
-        * a group of application and device authors to quickly formulate and try out new interface capabilities without requiring an immediate change to the ASCOM device interface, which will take a lot longer than just agreeing a name, input parameters and a standard response for an Action command.
+        This capability will be of primary value to:
+        - bespoke software and hardware configurations where a single entity controls both the consuming application software and the hardware / driver environment.
+        - a group of application and device authors to quickly formulate and try out new interface capabilities without requiring an immediate change to the ASCOM device interface, which will take a lot longer than just agreeing a name, input parameters and a standard response for an Action command.

         The list of Action commands supported by a driver can be discovered through the SupportedActions property.
@@ -1023,66 +1021,56 @@ paths:
         from the image's row and column perspective, while, from the array's perspective, serialisation is actually effected in "row-major" order (rightmost index changes most rapidly).
         This unintuitive outcome arises because the ASCOM Camera Interface specification defines the image column dimension as the rightmost array dimension.
+        ```text
         [
+        [P00,P01,P02,P03,P04],
-        [P00,P01,P02,P03,P04],
-
-
-        [P10,P11,P12,P13,P14],
-
-
-        [P20,P21,P22,P23,P24],
-
+        [P10,P11,P12,P13,P14],
-        [P30,P31,P32,P33,P34],
+        [P20,P21,P22,P23,P24],
+        [P30,P31,P32,P33,P34],
-        [P40,P41,P42,P43,P44],
+        [P40,P41,P42,P43,P44],
+        [P50,P51,P52,P53,P54],
-        [P50,P51,P52,P53,P54],
-
-
-        [P60,P61,P62,P63,P64]
-
+        [P60,P61,P62,P63,P64]
         ]
+        ```
+
         When the SensorType is Color, the serialised JSON array will have 3 dimensions. For example, the returned array should appear as below if NumX = 7, NumY = 5 and Rxy, Gxy and Bxy represent the red, green and blue pixel values at the zero based position x across and y down the image with the origin in the top left corner of the image. Please see note above regarding element ordering.
+        ```text
         [
+        [[R00,G00,B00],[R01,G01,B01],[R02,G02,B02],[R03,G03,B03],[R04,G04,B04]],
-        [[R00,G00,B00],[R01,G01,B01],[R02,G02,B02],[R03,G03,B03],[R04,G04,B04]],
-
+        [[R10,G10,B10],[R11,G11,B11],[R12,G12,B12],[R13,G13,B13],[R14,G14,B14]],
-        [[R10,G10,B10],[R11,G11,B11],[R12,G12,B12],[R13,G13,B13],[R14,G14,B14]],
+        [[R20,G20,B20],[R21,G21,B21],[R22,G22,B22],[R23,G23,B23],[R24,G24,B24]],
+        [[R30,G30,B30],[R31,G31,B31],[R32,G32,B32],[R33,G33,B33],[R34,G34,B34]],
-        [[R20,G20,B20],[R21,G21,B21],[R22,G22,B22],[R23,G23,B23],[R24,G24,B24]],
-
+        [[R40,G40,B40],[R41,G41,B41],[R42,G42,B42],[R43,G43,B43],[R44,G44,B44]],
-        [[R30,G30,B30],[R31,G31,B31],[R32,G32,B32],[R33,G33,B33],[R34,G34,B34]],
-
-        [[R40,G40,B40],[R41,G41,B41],[R42,G42,B42],[R43,G43,B43],[R44,G44,B44]],
-
-        [[R50,G50,B50],[R51,G51,B51],[R52,G52,B52],[R53,G53,B53],[R54,G54,B54]],
-
-        [[R60,G60,B60],[R61,G61,B61],[R62,G62,B62],[R63,G63,B63],[R64,G64,B64]],
+        [[R50,G50,B50],[R51,G51,B51],[R52,G52,B52],[R53,G53,B53],[R54,G54,B54]],
+        [[R60,G60,B60],[R61,G61,B61],[R62,G62,B62],[R63,G63,B63],[R64,G64,B64]],
         ]
+        ```
-        __`Performance`__
+
+        # Performance

         Returning an image from an Alpaca device as a JSON array is very inefficient and can result in delays of 30 or more seconds while client and device process and send the huge JSON string over the network.
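(Editorial note on the Action/SupportedActions description changed above: a minimal usage sketch follows. The crate paths, the generated method signatures, the return types and the "FanOn" action name are assumptions made for illustration only; they are not part of this patch.)

```rust
// Hedged sketch: probe a driver for a bespoke action before invoking it,
// as the Action/SupportedActions documentation above suggests.
// Assumed: `ascom_alpaca::api::Device`, `supported_actions()` returning a
// Vec<String>, and `action(String, String)` returning the driver's reply.
use ascom_alpaca::api::Device;
use ascom_alpaca::{ASCOMError, ASCOMResult};

async fn fan_on(device: &impl Device) -> ASCOMResult<String> {
    let supported = device.supported_actions().await?;
    if !supported.iter().any(|name| name == "FanOn") {
        // Drivers with no bespoke functionality report NotImplemented (0x400).
        return Err(ASCOMError::NOT_IMPLEMENTED);
    }
    // Both the parameter string and the reply are free-form, so arbitrarily
    // complex payloads can be passed through, for example as JSON.
    device.action("FanOn".to_owned(), String::new()).await
}
```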
diff --git a/src/api/autogen/index.ts b/src/api/autogen/index.ts
index fb2ddc7..a5c1eae 100644
--- a/src/api/autogen/index.ts
+++ b/src/api/autogen/index.ts
@@ -638,7 +638,10 @@ function stringifyIter(
 function stringifyDoc(doc: string | undefined = '') {
   doc = doc.trim();
   if (!doc) return '';
-  return doc.includes('\n') ? `/**\n${doc}\n*/` : `/// ${doc}`;
+  return doc
+    .split(/\r?\n/)
+    .map(line => `/// ${line}`)
+    .join('\n');
 }

 let rendered = `
@@ -778,7 +781,7 @@ ${stringifyIter(types, ({ features, type }) => {

 ${cfg}
 impl ${type.name} {
-  const FORMAT: &[time::format_description::FormatItem<'static>] = time::macros::format_description!("${
+  const FORMAT: &'static [time::format_description::FormatItem<'static>] = time::macros::format_description!("${
     type.format
   }");

diff --git a/src/api/mod.rs b/src/api/mod.rs
index 57173ce..76cd505 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -553,22 +553,18 @@ pub trait Device: std::fmt::Debug + Send + Sync {
         }
     };

-    /**
-    Actions and SupportedActions are a standardised means for drivers to extend functionality beyond the built-in capabilities of the ASCOM device interfaces.
-
-    The key advantage of using Actions is that drivers can expose any device specific functionality required. The downside is that, in order to use these unique features, every application author would need to create bespoke code to present or exploit them.
-
-    The Action parameter and return strings are deceptively simple, but can support transmission of arbitrarily complex data structures, for example through JSON encoding.
-
-    This capability will be of primary value to
-    * bespoke software and hardware configurations where a single entity controls both the consuming application software and the hardware / driver environment
-    * a group of application and device authors to quickly formulate and try out new interface capabilities without requiring an immediate change to the ASCOM device interface, which will take a lot longer than just agreeing a name, input parameters and a standard response for an Action command.
-
-    The list of Action commands supported by a driver can be discovered through the SupportedActions property.
-
-    This method should return an error message and NotImplementedException error number (0x400) if the driver just implements the standard ASCOM device methods and has no bespoke, unique, functionality.
-    */
+    /// Actions and SupportedActions are a standardised means for drivers to extend functionality beyond the built-in capabilities of the ASCOM device interfaces.
+    /// The key advantage of using Actions is that drivers can expose any device specific functionality required. The downside is that, in order to use these unique features, every application author would need to create bespoke code to present or exploit them.
+    /// The Action parameter and return strings are deceptively simple, but can support transmission of arbitrarily complex data structures, for example through JSON encoding.
+    ///
+    /// This capability will be of primary value to:
+    /// - bespoke software and hardware configurations where a single entity controls both the consuming application software and the hardware / driver environment.
+    /// - a group of application and device authors to quickly formulate and try out new interface capabilities without requiring an immediate change to the ASCOM device interface, which will take a lot longer than just agreeing a name, input parameters and a standard response for an Action command.
+    ///
+    ///
+    /// The list of Action commands supported by a driver can be discovered through the SupportedActions property.
+    ///
+    /// This method should return an error message and NotImplementedException error number (0x400) if the driver just implements the standard ASCOM device methods and has no bespoke, unique, functionality.
     #[http("action", method = Put, via = ValueResponse)]
     async fn action(
         &self,
@@ -885,63 +881,63 @@ pub trait Camera: Device + Send + Sync {
     }
         Err(ASCOMError::NOT_IMPLEMENTED)
     }

-    /**
-    Returns an array of 32bit integers containing the pixel values from the last exposure. This call can return either a 2 dimension (monochrome images) or 3 dimension (colour or multi-plane images) array of size NumX \* NumY or NumX \* NumY \* NumPlanes. Where applicable, the size of NumPlanes has to be determined by inspection of the returned Array.
-
-    Since 32bit integers are always returned by this call, the returned JSON Type value (0 = Unknown, 1 = short(16bit), 2 = int(32bit), 3 = Double) is always 2. The number of planes is given in the returned Rank value.
-
-    When de-serialising to an object it is essential to know the array Rank beforehand so that the correct data class can be used. This can be achieved through a regular expression or by direct parsing of the returned JSON string to extract the Type and Rank values before de-serialising.
-
-    This regular expression accomplishes the extraction into two named groups Type and Rank, which can then be used to select the correct de-serialisation data class:
-
-    __`^*"Type":(?<Type>\d*),"Rank":(?<Rank>\d*)`__
-
-    When the SensorType is Monochrome, RGGB, CMYG, CMYG2 or LRGB, the serialised JSON array should have 2 dimensions. For example, the returned array should appear as below if NumX = 7, NumY = 5 and Pxy represents the pixel value at the zero based position x across and y down the image with the origin in the top left corner of the image.
-
-    Please note that this is "column-major" order (column changes most rapidly) from the image's row and column perspective, while, from the array's perspective, serialisation is actually effected in "row-major" order (rightmost index changes most rapidly). This unintuitive outcome arises because the ASCOM Camera Interface specification defines the image column dimension as the rightmost array dimension.
-
-    [
-
-    [P00,P01,P02,P03,P04],
-
-    [P10,P11,P12,P13,P14],
-
-    [P20,P21,P22,P23,P24],
-
-    [P30,P31,P32,P33,P34],
-
-    [P40,P41,P42,P43,P44],
-
-    [P50,P51,P52,P53,P54],
-
-    [P60,P61,P62,P63,P64]
-
-    ]
-
-    When the SensorType is Color, the serialised JSON array will have 3 dimensions. For example, the returned array should appear as below if NumX = 7, NumY = 5 and Rxy, Gxy and Bxy represent the red, green and blue pixel values at the zero based position x across and y down the image with the origin in the top left corner of the image. Please see note above regarding element ordering.
-
-    [
-
-    [[R00,G00,B00],[R01,G01,B01],[R02,G02,B02],[R03,G03,B03],[R04,G04,B04]],
-
-    [[R10,G10,B10],[R11,G11,B11],[R12,G12,B12],[R13,G13,B13],[R14,G14,B14]],
-
-    [[R20,G20,B20],[R21,G21,B21],[R22,G22,B22],[R23,G23,B23],[R24,G24,B24]],
-
-    [[R30,G30,B30],[R31,G31,B31],[R32,G32,B32],[R33,G33,B33],[R34,G34,B34]],
-
-    [[R40,G40,B40],[R41,G41,B41],[R42,G42,B42],[R43,G43,B43],[R44,G44,B44]],
-
-    [[R50,G50,B50],[R51,G51,B51],[R52,G52,B52],[R53,G53,B53],[R54,G54,B54]],
-
-    [[R60,G60,B60],[R61,G61,B61],[R62,G62,B62],[R63,G63,B63],[R64,G64,B64]],
-
-    ]
-
-    __`Performance`__
-
-    Returning an image from an Alpaca device as a JSON array is very inefficient and can result in delays of 30 or more seconds while client and device process and send the huge JSON string over the network. A new, much faster mechanic called ImageBytes - [Alpaca ImageBytes Concepts and Implementation](https://www.ascom-standards.org/Developer/AlpacaImageBytes.pdf) has been developed that sends data as a binary byte stream and can offer a 10 to 20 fold reduction in transfer time. It is strongly recommended that Alpaca Cameras implement the ImageBytes mechanic as well as the JSON mechanic.
-    */
+    /// Returns an array of 32bit integers containing the pixel values from the last exposure. This call can return either a 2 dimension (monochrome images) or 3 dimension (colour or multi-plane images) array of size NumX \* NumY or NumX \* NumY \* NumPlanes. Where applicable, the size of NumPlanes has to be determined by inspection of the returned Array.
+    ///
+    /// Since 32bit integers are always returned by this call, the returned JSON Type value (0 = Unknown, 1 = short(16bit), 2 = int(32bit), 3 = Double) is always 2. The number of planes is given in the returned Rank value.
+    ///
+    /// When de-serialising to an object it is essential to know the array Rank beforehand so that the correct data class can be used. This can be achieved through a regular expression or by direct parsing of the returned JSON string to extract the Type and Rank values before de-serialising.
+    ///
+    /// This regular expression accomplishes the extraction into two named groups Type and Rank, which can then be used to select the correct de-serialisation data class:
+    ///
+    /// __`^*"Type":(?<Type>\d*),"Rank":(?<Rank>\d*)`__
+    ///
+    /// When the SensorType is Monochrome, RGGB, CMYG, CMYG2 or LRGB, the serialised JSON array should have 2 dimensions. For example, the returned array should appear as below if NumX = 7, NumY = 5 and Pxy represents the pixel value at the zero based position x across and y down the image with the origin in the top left corner of the image.
+    /// Please note that this is "column-major" order (column changes most rapidly) from the image's row and column perspective, while, from the array's perspective, serialisation is actually effected in "row-major" order (rightmost index changes most rapidly). This unintuitive outcome arises because the ASCOM Camera Interface specification defines the image column dimension as the rightmost array dimension.
+    /// ```text
+    /// [
+    ///
+    /// [P00,P01,P02,P03,P04],
+    ///
+    /// [P10,P11,P12,P13,P14],
+    ///
+    /// [P20,P21,P22,P23,P24],
+    ///
+    /// [P30,P31,P32,P33,P34],
+    ///
+    /// [P40,P41,P42,P43,P44],
+    ///
+    /// [P50,P51,P52,P53,P54],
+    ///
+    /// [P60,P61,P62,P63,P64]
+    ///
+    /// ]
+    /// ```
+    ///
+    /// When the SensorType is Color, the serialised JSON array will have 3 dimensions. For example, the returned array should appear as below if NumX = 7, NumY = 5 and Rxy, Gxy and Bxy represent the red, green and blue pixel values at the zero based position x across and y down the image with the origin in the top left corner of the image. Please see note above regarding element ordering.
+    /// ```text
+    /// [
+    ///
+    /// [[R00,G00,B00],[R01,G01,B01],[R02,G02,B02],[R03,G03,B03],[R04,G04,B04]],
+    ///
+    /// [[R10,G10,B10],[R11,G11,B11],[R12,G12,B12],[R13,G13,B13],[R14,G14,B14]],
+    ///
+    /// [[R20,G20,B20],[R21,G21,B21],[R22,G22,B22],[R23,G23,B23],[R24,G24,B24]],
+    ///
+    /// [[R30,G30,B30],[R31,G31,B31],[R32,G32,B32],[R33,G33,B33],[R34,G34,B34]],
+    ///
+    /// [[R40,G40,B40],[R41,G41,B41],[R42,G42,B42],[R43,G43,B43],[R44,G44,B44]],
+    ///
+    /// [[R50,G50,B50],[R51,G51,B51],[R52,G52,B52],[R53,G53,B53],[R54,G54,B54]],
+    ///
+    /// [[R60,G60,B60],[R61,G61,B61],[R62,G62,B62],[R63,G63,B63],[R64,G64,B64]],
+    ///
+    /// ]
+    /// ```
+    ///
+    /// # Performance
+    ///
+    /// Returning an image from an Alpaca device as a JSON array is very inefficient and can result in delays of 30 or more seconds while client and device process and send the huge JSON string over the network. A new, much faster mechanic called ImageBytes - [Alpaca ImageBytes Concepts and Implementation](https://www.ascom-standards.org/Developer/AlpacaImageBytes.pdf) has been developed that sends data as a binary byte stream and can offer a 10 to 20 fold reduction in transfer time. It is strongly recommended that Alpaca Cameras implement the ImageBytes mechanic as well as the JSON mechanic.
     #[http("imagearray", method = Get)]
     async fn image_array(&self) -> ASCOMResult<ImageArray> {
         Err(ASCOMError::NOT_IMPLEMENTED)
     }
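(Editorial note on the imagearray documentation above: it recommends extracting Type and Rank before de-serialising the full payload. The sketch below shows that client-side check using `serde_json` rather than the suggested regular expression; `serde_json` is an assumed dependency and the payload shape is only a minimal illustration of the fields named in the doc text.)

```rust
// Hedged sketch: read the Type and Rank fields first, then pick a 2-D or 3-D
// target type for the expensive de-serialisation of the pixel data.
use serde_json::Value;

fn classify_image_array(response_body: &str) -> Result<(u64, u64), serde_json::Error> {
    let value: Value = serde_json::from_str(response_body)?;
    // Per the doc text: Type is always 2 (int32) for imagearray; Rank is 2 for
    // monochrome/Bayer sensors and 3 for colour sensors.
    let type_code = value.get("Type").and_then(Value::as_u64).unwrap_or(0);
    let rank = value.get("Rank").and_then(Value::as_u64).unwrap_or(0);
    Ok((type_code, rank))
}

fn main() -> Result<(), serde_json::Error> {
    // Minimal illustrative payload shaped like the fields discussed above.
    let body = r#"{"Type":2,"Rank":2,"Value":[[0,1],[2,3]]}"#;
    let (type_code, rank) = classify_image_array(body)?;
    assert_eq!((type_code, rank), (2, 2));
    // A caller would now de-serialise into a rank-2 or rank-3 integer array
    // accordingly instead of guessing the shape up front.
    Ok(())
}
```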
diff --git a/src/lib.rs b/src/lib.rs
index e5c1120..b5312f7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -302,11 +302,7 @@ Licensed under either of
 - Apache License, Version 2.0 ([LICENSE-APACHE-2.0](LICENSE-APACHE-2.0))
 - MIT license ([LICENSE-MIT](LICENSE-MIT))
 */
-#![cfg_attr(
-    all(doc, feature = "nightly"),
-    feature(doc_auto_cfg, async_fn_in_trait),
-    allow(incomplete_features)
-)]
+#![cfg_attr(all(doc, feature = "nightly"), feature(doc_auto_cfg))]
 #![warn(
     clippy::pedantic,
     clippy::nursery,
@@ -348,7 +344,9 @@ Licensed under either of
     clippy::single_match_else,
     clippy::type_repetition_in_bounds,
     clippy::let_underscore_untyped,
-    clippy::struct_excessive_bools
+    clippy::struct_excessive_bools,
+    // For cargo-rdme
+    rustdoc::redundant_explicit_links
 )]

 pub(crate) mod macros;
diff --git a/src/macros.rs b/src/macros.rs
index 38ad78d..2a8674d 100644
--- a/src/macros.rs
+++ b/src/macros.rs
@@ -39,7 +39,7 @@ macro_rules! rpc_trait {
         }
     ) => {
         $(# $attr)*
-        #[cfg_attr(not(all(doc, feature = "nightly")), async_trait::async_trait)]
+        #[async_trait::async_trait]
         #[allow(unused_variables)]
         $pub trait $trait_name: $($first_parent)::+ $(+ $($other_parents)::+)* {
             $(
@@ -51,6 +51,13 @@ macro_rules! rpc_trait {
             $(
                 $(#[doc = $doc])*
+                ///
+                /// Definition before the `#[async_trait]` expansion:
+                ///
+                /// ```ignore
+                #[doc = concat!("async fn ", stringify!($method_name), "(&self", $(", ", stringify!($param), ": ", stringify!($param_ty),)* ") -> ", stringify!($return_type))]
+                /// # { unimplemented!() }
+                /// ```
                 async fn $method_name(
                     & $self $(, $param: $param_ty)*
                 ) -> $return_type $default_body
@@ -135,7 +142,7 @@ rpc_trait {
         }

         #[cfg(feature = "client")]
-        #[cfg_attr(not(all(doc, feature = "nightly")), async_trait::async_trait)]
+        #[async_trait::async_trait]
         impl $trait_name for $crate::client::RawDeviceClient {
             $(
                 $(
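(Editorial note on the `rpc_trait!` change above: it embeds each method's pre-`#[async_trait]` signature into the generated docs with `concat!` and `stringify!`. The standalone sketch below demonstrates that same technique on a tiny macro of its own; the `set_speed` method name and its types are made up for illustration and are not part of the patch.)

```rust
// Hedged sketch of the concat!/stringify! signature-rendering technique.
macro_rules! doc_signature {
    (async fn $name:ident(&self $(, $param:ident: $param_ty:ty)*) -> $ret:ty) => {
        concat!(
            "async fn ",
            stringify!($name),
            "(&self",
            $(", ", stringify!($param), ": ", stringify!($param_ty),)*
            ") -> ",
            stringify!($ret)
        )
    };
}

fn main() {
    // Prints the pre-expansion signature, along the lines of:
    // async fn set_speed(&self, value: f64) -> ASCOMResult<()>
    println!(
        "{}",
        doc_signature!(async fn set_speed(&self, value: f64) -> ASCOMResult<()>)
    );
}
```

In the patch itself the resulting string is wrapped by the surrounding `/// ```ignore` and `/// # { unimplemented!() }` lines, so rustdoc shows the original `async fn` signature even though `#[async_trait]` rewrites the real method to return a boxed future.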